summaryrefslogtreecommitdiff
path: root/tex/context/modules/common/s-obsolete-tokens.mkiv
blob: 81467fc6fd5f29585ca5ead1de1d4d56a01b3fc0 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
%D \module
%D   [       file=toks-tra, % was toks-ini
%D        version=2007.03.03,
%D          title=\CONTEXT\ Obsolete Modules,
%D       subtitle=Tokens,
%D         author=Hans Hagen,
%D           date=\currentdate,
%D      copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.

%D The code here used to be in the \type {toks-tra} files which were made in
%D the very early days of \LUATEX\ and used in articles and presentations
%D about this engine. Because the code is used in manuals we keep it around
%D as a module.

\writestatus{loading}{ConTeXt Obsolete Modules / Tokens}

%D This used to be in \type {toks-tra.lua}:

\startluacode

-- Standard ConTeXt module registration record; the global 'modules'
-- table is created on demand so load order does not matter.
modules = modules or { }

modules ['s-obsolete-tokens'] = {
    version   = 1.001,
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

-- Local aliases: globals and library functions are localized once, both
-- for speed (upvalue access beats repeated global lookups) and to be
-- immune to later redefinition of the globals.
local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
local format, gsub = string.format, string.gsub -- NOTE(review): 'format' is unused in the visible code
local tostring = tostring -- NOTE(review): unused in the visible code

local tokens   = tokens
local token    = token -- the built in one
local tex      = tex
local context  = context
local commands = commands

-- The collector state tables are created only when absent, so reloading
-- this module preserves previously collected data.
tokens.collectors     = tokens.collectors or { }
local collectors      = tokens.collectors

collectors.data       = collectors.data or { }
local collectordata   = collectors.data

collectors.registered = collectors.registered or { }
local registered      = collectors.registered

-- Channel for the "not supported (yet)" messages emitted by the stubs below.
local report          = logs.reporter("tokens","collectors")

-- todo:
--
-- register : macros that will be expanded (only for demo-ing)
-- flush    : print back to tex
-- test     : fancy stuff

-- Entry points of the engine token scanner used by collectors.install
-- and the constructors near the end of this chunk.
local get_next     = token.get_next
local create_token = token.create

-- Grab tokens from the input stream and store them under 'tag' until the
-- given end control sequence shows up; that cs is then injected back so
-- it can do its normal work (e.g. \stoptokens).
function collectors.install(tag,end_cs)
    local stored = { }
    local count  = 0
    collectordata[tag] = stored
    -- accept both "stoptokens" and "\stoptokens" as terminator spec
    end_cs = gsub(end_cs,"^\\","")
    repeat
        local t = get_next()
        if t.csname == end_cs then
            context[end_cs]()
            return
        end
        count = count + 1
        stored[count] = t
    until false
end

-- Short labels for the two plain-character commands; everything else is
-- shown under its full command name.
local simple = { letter = "letter", other_char = "other" }

-- Typeset a collected token list (or the list stored under a tag name)
-- as a tabulated overview: command, meaning and token properties.
function collectors.show(data)
    -- We no longer have methods as we only used (in demos) method a
    -- so there is no need to burden the core with this. We have a
    -- different table anyway.
    if type(data) == "string" then
        data = collectordata[data]
    end
    if not data then
        return
    end
    local ctx_NC       = context.NC
    local ctx_NR       = context.NR
    local ctx_bold     = context.bold
    local ctx_verbatim = context.verbatim
    local function show(list)
        for i=1,#list do
            local t = list[i]
            if type(t) == "table" then
                -- nested sublist: recurse
                show(t)
            else
                local cmdname = t.cmdname
                local short   = simple[cmdname]
                ctx_NC()
                ctx_verbatim(short or cmdname)
                ctx_NC()
                if short then
                    ctx_verbatim(utfchar(t.index)) -- plain character: show the glyph
                else
                    ctx_verbatim(t.csname)
                end
                ctx_NC()
                if t.active     then context("active ")     end
                if t.expandable then context("expandable ") end
                if t.protected  then context("protected ")  end
                ctx_NC()
                ctx_NR()
            end
        end
    end
    context.starttabulate { "|Tl|Tc|Tl|" }
        ctx_NC() ctx_bold("cmd")
        ctx_NC() ctx_bold("meaning")
        ctx_NC() ctx_bold("properties")
        ctx_NC() ctx_NR()
        context.HL()
        show(data)
    context.stoptabulate()
end

-- Printing a collected list back into the input is not implemented in
-- this obsolete module; we only report the attempt.
local function printlist(data)
    if not data or #data == 0 then
        return
    end
    report("not supported (yet): printing back to tex")
end

tokens.printlist = printlist -- will change to another namespace

-- Hand the list collected under 'tag' to the (stub) printer.
function collectors.flush(tag)
    local list = collectordata[tag]
    printlist(list)
end

function collectors.test(tag,handle)
    -- Stub: word-wise testing is not implemented in this obsolete module;
    -- the arguments are accepted for interface compatibility but ignored.
    report("not supported (yet): testing")
end

function collectors.register(name)
    -- Stub: registering expandable macros is not implemented here; the
    -- argument is accepted for interface compatibility but ignored.
    report("not supported (yet): registering")
end

-- -- old token code
--
--  -- 1 = command, 2 = modifier (char), 3 = controlsequence id
--
--  local create       = token.create
--  local csname_id    = token.csname_id
--  local command_id   = token.command_id
--  local command_name = token.command_name
--  local get_next     = token.get_next
--  local expand       = token.expand
--  local csname_name  = token.csname_name
--
--  local function printlist(data)
--      if data and #data > 0 then
--          callbacks.push('token_filter', function ()
--             callbacks.pop('token_filter') -- tricky but the nil assignment helps
--             return data
--          end)
--      end
--  end
--
--  tokens.printlist = printlist -- will change to another namespace
--
--  function collectors.flush(tag)
--      printlist(collectordata[tag])
--  end
--
--  function collectors.register(name)
--      registered[csname_id(name)] = name
--  end
--
--  local call   = command_id("call")
--  local letter = command_id("letter")
--  local other  = command_id("other_char")
--
--  function collectors.install(tag,end_cs)
--      local data, d = { }, 0
--      collectordata[tag] = data
--      end_cs = gsub(end_cs,"^\\","")
--      local endcs = csname_id(end_cs)
--      while true do
--          local t = get_next()
--          local a, b = t[1], t[3]
--          if b == endcs then
--              context[end_cs]()
--              return
--          elseif a == call and registered[b] then
--              expand()
--          else
--              d = d + 1
--              data[d] = t
--          end
--      end
--  end
--
--  function collectors.show(data)
--      -- We no longer have methods as we only used (in demos) method a
--      -- so there is no need to burden the core with this.
--      if type(data) == "string" then
--          data = collectordata[data]
--      end
--      if not data then
--          return
--      end
--      local ctx_NC       = context.NC
--      local ctx_NR       = context.NR
--      local ctx_bold     = context.bold
--      local ctx_verbatim = context.verbatim
--      context.starttabulate { "|T|Tr|cT|Tr|T|" }
--      ctx_NC() ctx_bold("cmd")
--      ctx_NC() ctx_bold("chr")
--      ctx_NC()
--      ctx_NC() ctx_bold("id")
--      ctx_NC() ctx_bold("name")
--      ctx_NC() ctx_NR()
--      context.HL()
--      for i=1,#data do
--          local token = data[i]
--          local cmd   = token[1]
--          local chr   = token[2]
--          local id    = token[3]
--          local name  = command_name(token)
--          ctx_NC()
--          ctx_verbatim(name)
--          ctx_NC()
--          if tonumber(chr) >= 0 then
--              ctx_verbatim(chr)
--          end
--          ctx_NC()
--          if cmd == letter or cmd == other then
--              ctx_verbatim(utfchar(chr))
--          end
--          ctx_NC()
--          if id > 0 then
--              ctx_verbatim(id)
--          end
--          ctx_NC()
--          if id > 0 then
--              ctx_verbatim(csname_name(token) or "")
--          end
--          ctx_NC() ctx_NR()
--      end
--      context.stoptabulate()
--  end
--
--  function collectors.test(tag,handle)
--      local t, w, tn, wn = { }, { }, 0, 0
--      handle = handle or collectors.defaultwords
--      local tagdata = collectordata[tag]
--      for k=1,#tagdata do
--          local v = tagdata[k]
--          if v[1] == letter then
--              wn = wn + 1
--              w[wn] = v[2]
--          else
--              if wn > 0 then
--                  handle(t,w)
--                  wn = 0
--              end
--              tn = tn + 1
--              t[tn] = v
--          end
--      end
--      if wn > 0 then
--          handle(t,w)
--      end
--      collectordata[tag] = t
--  end

-- Interfacing:

-- Expose the collectors at the \ctxcommand interface used by the TeX
-- macros defined later in this file.
commands.collecttokens = collectors.install
commands.showtokens    = collectors.show
commands.flushtokens   = collectors.flush
commands.testtokens    = collectors.test
commands.registertoken = collectors.register

-- Redundant:

-- function collectors.test(tag)
--     printlist(collectordata[tag])
-- end

-- For old times sake:

collectors.dowithwords = collectors.test

-- This is only used in old articles ... will move to a module:

-- Ready-made tokens and simple token constructors, only used in old
-- articles (so they may move to a module at some point).
tokens.vbox   = create_token("vbox")
tokens.hbox   = create_token("hbox")
tokens.vtop   = create_token("vtop")
tokens.bgroup = create_token(utfbyte("{"),1) -- catcode 1 : begin group
tokens.egroup = create_token(utfbyte("}"),2) -- catcode 2 : end group

-- Make a single character token with letter (11) resp. other (12) catcode.
function tokens.letter(chr)
    return create_token(utfbyte(chr),11)
end

function tokens.other(chr)
    return create_token(utfbyte(chr),12)
end

-- Turn a (utf) string into a list of letter (catcode 11) tokens.
function tokens.letters(str)
    local result = { }
    local count  = 0
    for c in utfvalues(str) do
        count = count + 1
        result[count] = create_token(c, 11)
    end
    return result
end

-- Default word handler: append a group to 't' that typesets one star
-- per element of 'str', wrapped in \red coloring.
function collectors.defaultwords(t,str)
    if not t then
        return
    end
    local n = #t + 1
    t[n] = tokens.bgroup
    n = n + 1
    t[n] = create_token("red")
    for i=1,#str do
        n = n + 1
        t[n] = tokens.other('*')
    end
    n = n + 1
    t[n] = tokens.egroup
end

\stopluacode

%D This used to be in \type {toks-tra.mkiv}:

% used to be: \registerctxluafile{toks-tra}{}

\unprotect

%D Handy for manuals \unknown\ but not really used in practice, so it might
%D become a runtime loaded module instead.

% The user level interface: each macro hands its argument to the Lua
% collectors defined above via \ctxcommand. \starttokens itself scans
% the input up to \stoptokens, which therefore can stay a \relax.
\unexpanded\def\starttokens  [#1]{\ctxcommand{collecttokens("#1","stoptokens")}}
           \let\stoptokens        \relax
\unexpanded\def\flushtokens  [#1]{\ctxcommand{flushtokens("#1")}}
\unexpanded\def\showtokens   [#1]{\ctxcommand{showtokens("#1")}}
\unexpanded\def\testtokens   [#1]{\ctxcommand{testtokens("#1")}}
\unexpanded\def\registertoken  #1{\ctxcommand{registertoken("#1")}}

% Keep the bracketed variant under a private name, then redefine
% \showtokens so that without a following [ it falls back on the
% engine primitive (\normalshowtokens).
\let\toks_show\showtokens % we also support the primitive

\unexpanded\def\showtokens{\doifelsenextoptional\toks_show\normalshowtokens}

\protect \endinput