-- toks-tra.lua  size: 8178 b  last modification: 2020-07-01 14:35
if not modules then modules = { } end modules ['toks-ini'] = {
    version   = 1.001,
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

-- this will become a module

local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
local format, gsub = string.format, string.gsub
local tostring = tostring

local tokens   = tokens
local token    = token -- the built in one
local tex      = tex
local context  = context
local commands = commands

tokens.collectors = tokens.collectors or { }
local collectors  = tokens.collectors

collectors.data = collectors.data or { }
local collectordata = collectors.data

collectors.registered = collectors.registered or { }
local registered = collectors.registered

local report = logs.reporter("tokens","collectors")

-- todo:
--
-- register : macros that will be expanded (only for demo-ing)
-- flush    : print back to tex
-- test     : fancy stuff

local get_next     = token.get_next
local create_token = token.create

function collectors.install(tag,end_cs)
    local data, d = { }, 0
    collectordata[tag] = data
    end_cs = gsub(end_cs,"^\\","")
    while true do
        local t = get_next()
        if t.csname == end_cs then
            context[end_cs]()
            return
        else
            d = d + 1
            data[d] = t
        end
    end
end
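
-- A minimal usage sketch (hypothetical tag and end macro, assuming a macro
-- \stopsomething is known at the TeX end when the collector runs):
--
--   collectors.install("demo","\\stopsomething")
--
-- Tokens are then grabbed with get_next and stored under collectordata.demo
-- until the \stopsomething control sequence shows up, at which point the
-- corresponding context call flushes that macro back to TeX and collecting
-- stops.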

local simple = { letter = "letter", other_char = "other" }

function collectors.show(data)
    -- We no longer have methods as we only used (in demos) method a
    -- so there is no need to burden the core with this. We have a
    -- different table anyway.
    if type(data) == "string" then
        data = collectordata[data]
    end
    if not data then
        return
    end
    local ctx_NC       = context.NC
    local ctx_NR       = context.NR
    local ctx_bold     = context.bold
    local ctx_verbatim = context.verbatim
    context.starttabulate { "|Tl|Tc|Tl|" }
    ctx_NC() ctx_bold("cmd")
    ctx_NC() ctx_bold("meaning")
    ctx_NC() ctx_bold("properties")
    ctx_NC() ctx_NR()
    context.HL()
    for i=1,#data do
        local token   = data[i]
        local cmdname = token.cmdname
        local simple  = simple[cmdname]
        ctx_NC()
        ctx_verbatim(simple or cmdname)
        ctx_NC()
        ctx_verbatim(simple and utfchar(token.mode) or token.csname)
        ctx_NC()
        if token.active     then context("active ")     end
        if token.expandable then context("expandable ") end
        if token.protected  then context("protected ")  end
        ctx_NC()
        ctx_NR()
    end
    context.stoptabulate()
end
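
-- Typesetting what has been collected; both calls below are sketches and
-- assume the hypothetical "demo" collector from above has been filled:
--
--   collectors.show("demo")              -- look up by tag
--   collectors.show(collectordata.demo)  -- or pass the token list directly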

local function printlist(data)
    if data and #data > 0 then
        report("not supported (yet): printing back to tex")
    end
end

tokens.printlist = printlist -- will change to another namespace

function collectors.flush(tag)
    printlist(collectordata[tag])
end

function collectors.test(tag,handle)
    report("not supported (yet): testing")
end

function collectors.register(name)
    report("not supported (yet): registering")
end

-- -- old token code
--
-- -- 1 = command, 2 = modifier (char), 3 = controlsequence id
--
-- local create       = token.create
-- local csname_id    = token.csname_id
-- local command_id   = token.command_id
-- local command_name = token.command_name
-- local get_next     = token.get_next
-- local expand       = token.expand
-- local csname_name  = token.csname_name
--
-- local function printlist(data)
--     if data and #data > 0 then
--         callbacks.push('token_filter', function ()
--             callbacks.pop('token_filter') -- tricky but the nil assignment helps
--             return data
--         end)
--     end
-- end
--
-- tokens.printlist = printlist -- will change to another namespace
--
-- function collectors.flush(tag)
--     printlist(collectordata[tag])
-- end
--
-- function collectors.register(name)
--     registered[csname_id(name)] = name
-- end
--
-- local call   = command_id("call")
-- local letter = command_id("letter")
-- local other  = command_id("other_char")
--
-- function collectors.install(tag,end_cs)
--     local data, d = { }, 0
--     collectordata[tag] = data
--     end_cs = gsub(end_cs,"^\\","")
--     local endcs = csname_id(end_cs)
--     while true do
--         local t = get_next()
--         local a, b = t[1], t[3]
--         if b == endcs then
--             context[end_cs]()
--             return
--         elseif a == call and registered[b] then
--             expand()
--         else
--             d = d + 1
--             data[d] = t
--         end
--     end
-- end
--
-- function collectors.show(data)
--     -- We no longer have methods as we only used (in demos) method a
--     -- so there is no need to burden the core with this.
--     if type(data) == "string" then
--         data = collectordata[data]
--     end
--     if not data then
--         return
--     end
--     local ctx_NC       = context.NC
--     local ctx_NR       = context.NR
--     local ctx_bold     = context.bold
--     local ctx_verbatim = context.verbatim
--     context.starttabulate { "|T|Tr|cT|Tr|T|" }
--     ctx_NC() ctx_bold("cmd")
--     ctx_NC() ctx_bold("chr")
--     ctx_NC()
--     ctx_NC() ctx_bold("id")
--     ctx_NC() ctx_bold("name")
--     ctx_NC() ctx_NR()
--     context.HL()
--     for i=1,#data do
--         local token = data[i]
--         local cmd   = token[1]
--         local chr   = token[2]
--         local id    = token[3]
--         local name  = command_name(token)
--         ctx_NC()
--         ctx_verbatim(name)
--         ctx_NC()
--         if tonumber(chr) >= 0 then
--             ctx_verbatim(chr)
--         end
--         ctx_NC()
--         if cmd == letter or cmd == other then
--             ctx_verbatim(utfchar(chr))
--         end
--         ctx_NC()
--         if id > 0 then
--             ctx_verbatim(id)
--         end
--         ctx_NC()
--         if id > 0 then
--             ctx_verbatim(csname_name(token) or "")
--         end
--         ctx_NC() ctx_NR()
--     end
--     context.stoptabulate()
-- end
--
-- function collectors.test(tag,handle)
--     local t, w, tn, wn = { }, { }, 0, 0
--     handle = handle or collectors.defaultwords
--     local tagdata = collectordata[tag]
--     for k=1,#tagdata do
--         local v = tagdata[k]
--         if v[1] == letter then
--             wn = wn + 1
--             w[wn] = v[2]
--         else
--             if wn > 0 then
--                 handle(t,w)
--                 wn = 0
--             end
--             tn = tn + 1
--             t[tn] = v
--         end
--     end
--     if wn > 0 then
--         handle(t,w)
--     end
--     collectordata[tag] = t
-- end

-- Interfacing:

commands.collecttokens = collectors.install
commands.showtokens    = collectors.show
commands.flushtokens   = collectors.flush
commands.testtokens    = collectors.test
commands.registertoken = collectors.register
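
-- These commands are meant to be reached from the TeX end, for instance via
-- \ctxcommand. The macro names below are only a sketch, not definitions made
-- in this file:
--
--   \unexpanded\def\starttokens[#1]{\ctxcommand{collecttokens("#1","stoptokens")}}
--                \def\stoptokens     {}
--                \def\showtokens  [#1]{\ctxcommand{showtokens("#1")}}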

-- Redundant:

-- function collectors.test(tag)
--     printlist(collectordata[tag])
-- end

-- For old times sake:

collectors.dowithwords = collectors.test

-- This is only used in old articles ... will move to a module:

tokens.vbox   = create_token("vbox")
tokens.hbox   = create_token("hbox")
tokens.vtop   = create_token("vtop")
tokens.bgroup = create_token(utfbyte("{"),1)
tokens.egroup = create_token(utfbyte("}"),2)

tokens.letter = function(chr) return create_token(utfbyte(chr),11) end
tokens.other  = function(chr) return create_token(utfbyte(chr),12) end

tokens.letters = function(str)
    local t, n = { }, 0
    for chr in utfvalues(str) do
        n = n + 1
        t[n] = create_token(chr,11)
    end
    return t
end
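
-- For example, tokens.letters("abc") returns a list of three letter tokens
-- (catcode 11), one per unicode value in the string, while tokens.other("*")
-- returns a single other character (catcode 12) token.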

function collectors.defaultwords(t,str)
    if t then
        local n = #t
        n = n + 1 ; t[n] = tokens.bgroup
        n = n + 1 ; t[n] = create_token("red")
        for i=1,#str do
            n = n + 1 ; t[n] = tokens.other('*')
        end
        n = n + 1 ; t[n] = tokens.egroup
    end
end
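
-- defaultwords is the fallback handler for the (currently stubbed) word test:
-- for each collected word it appends a group { \red * ... * } to the token
-- list t, one asterisk per byte of str (note that #str counts bytes, not
-- unicode characters).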