buff-ini.lua /size: 27 Kb    last modification: 2021-10-28 13:50
1
-- Module registration: identifies this file to the ConTeXt module system.
if not modules then modules = { } end modules ['buff-ini'] = {
    version   = 1.001,
    comment   = "companion to buff-ini.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}
8 9
-- Localized standard-library and ConTeXt helpers (globals are table lookups
-- in Lua; caching them in locals is the usual ConTeXt performance idiom).

local concat = table.concat
local type, next, load = type, next, load
local sub, format, find = string.sub, string.format, string.find
-- NOTE(review): validstring is bound to string.valid (a ConTeXt extension),
-- not to a function called "validstring" -- the alias rename is intentional.
local splitlines, validstring, replacenewlines = string.splitlines, string.valid, string.replacenewlines
local P, Cs, patterns, lpegmatch = lpeg.P, lpeg.Cs, lpeg.patterns, lpeg.match
local utfchar  = utf.char
local nameonly = file.nameonly
local totable  = string.totable
local md5hex   = md5.hex
local isfile   = lfs.isfile
local savedata = io.savedata

-- Tracing flags, toggled at runtime via the trackers mechanism.
local trace_run       = false  trackers.register("buffers.run",       function(v) trace_run       = v end)
local trace_grab      = false  trackers.register("buffers.grab",      function(v) trace_grab      = v end)
local trace_visualize = false  trackers.register("buffers.visualize", function(v) trace_visualize = v end)

local report_buffers = logs.reporter("buffers","usage")
local report_typeset = logs.reporter("buffers","typeset")
----- report_grabbing = logs.reporter("buffers","grabbing")

local context  = context
local commands = commands

local implement = interfaces.implement

-- Token scanners used when grabbing buffer content from the TeX input.
local scanners      = tokens.scanners
local scanstring    = scanners.string
local scaninteger   = scanners.integer
local scanboolean   = scanners.boolean
local scancode      = scanners.code
local scantokencode = scanners.tokencode
----- scantoken  = scanners.token

local getters  = tokens.getters
local gettoken = getters.token

local getcommand  = token.get_command
local getcsname   = token.get_csname
-- Engine versions differ in the name of this accessor, hence the fallback.
local getnextchar = token.scan_next_char or token.get_next_char

local variables         = interfaces.variables
local settings_to_array = utilities.parsers.settings_to_array
local formatters        = string.formatters
local addsuffix         = file.addsuffix
local replacesuffix     = file.replacesuffix

local registertempfile = luatex.registertempfile

local v_yes    = variables.yes
local v_append = variables.append

-- Shared lpeg building blocks for line/whitespace analysis.
local eol        = patterns.eol
local space      = patterns.space
local whitespace = patterns.whitespace
local blackspace = whitespace - eol          -- whitespace that is not a newline
local whatever   = (1-eol)^1 * eol^0         -- rest of a line, optional newline
local emptyline  = space^0 * eol             -- a line holding only spaces

local catcodenumbers = catcodes.numbers

local ctxcatcodes = catcodenumbers.ctxcatcodes
local txtcatcodes = catcodenumbers.txtcatcodes

local setdata = job.datasets.setdata
local getdata = job.datasets.getdata

local ctx_viafile          = context.viafile
local ctx_getbuffer        = context.getbuffer
local ctx_pushcatcodetable = context.pushcatcodetable
local ctx_popcatcodetable  = context.popcatcodetable
local ctx_setcatcodetable  = context.setcatcodetable
local ctx_printlines       = context.printlines
81 82
-- Public namespace (created on first load) and the in-memory buffer store.
buffers = buffers or { }
local buffers = buffers

-- Maps buffer name -> { data = <string>, catcodes = <number?>, typeset = <bool> }.
local cache = { }
86 87
-- Remove the named buffer from the store (no-op when absent).
local function erase(bufname)
    cache[bufname] = nil
end
90 91
-- Store (or overwrite) a buffer: content plus the catcode regime it was
-- grabbed under; freshly assigned buffers are not yet typeset.
local function assign(name,str,catcodes)
    local entry    = { }
    entry.data     = str
    entry.catcodes = catcodes
    entry.typeset  = false
    cache[name]    = entry
end
98 99
-- Concatenate str onto an existing buffer (in front when prepend is true,
-- at the end otherwise); creates the buffer when it does not exist yet.
-- Any combination invalidates the typeset flag.
local function combine(name,str,prepend)
    local entry = cache[name]
    if not entry then
        cache[name] = {
            data    = str,
            typeset = false,
        }
    else
        if prepend then
            entry.data = str .. entry.data
        else
            entry.data = entry.data .. str
        end
        entry.typeset = false
    end
end
111 112
-- Convenience wrappers around combine.

-- Put str in front of the named buffer.
local function prepend(name,str)
    return combine(name,str,true)
end

-- Add str at the end of the named buffer.
local function append(name,str)
    return combine(name,str)
end
119 120
-- Return the cache entry itself (truthy when the buffer exists).
local function exists(bufname)
    return cache[bufname]
end

-- Return the buffer's content, or the empty string when the name is nil,
-- unknown, or holds no data.
local function getcontent(bufname)
    if bufname then
        local entry = cache[bufname]
        if entry and entry.data then
            return entry.data
        end
    end
    return ""
end
128 129
-- A buffer counts as empty when its content has no non-whitespace
-- character (missing buffers yield "" and are therefore empty too).
local function empty(name)
    return not find(getcontent(name),"%S")
end
136 137
-- Return the buffer's content split into a table of lines; falls through
-- with a falsy value when the name or the buffer is missing.
local function getlines(bufname)
    local entry = bufname and cache[bufname]
    return entry and splitlines(entry.data)
end
141 142
-- Normalize a buffer specification: a comma separated string becomes a
-- list; anything else (normally already a table) passes through as-is.
local function getnames(name)
    return type(name) == "string" and settings_to_array(name) or name
end
149 150
-- True when every known buffer in the (possibly comma separated) list has
-- been typeset already; an empty list reports false. Unknown names are
-- skipped rather than treated as not-typeset.
local function istypeset(name)
    local list = getnames(name)
    local n    = #list
    if n == 0 then
        return false
    end
    for index=1,n do
        local entry = cache[list[index]]
        if entry and not entry.typeset then
            return false
        end
    end
    return true
end
163 164
-- Flag every known buffer in the given list as typeset.
local function markastypeset(name)
    local list = getnames(name)
    for index=1,#list do
        local entry = cache[list[index]]
        if entry then
            entry.typeset = true
        end
    end
end
173 174
-- Collect the content of one or more buffers into a single string -- no
-- printing. An empty specification falls back to the default (nameless)
-- buffer; empty members are skipped when joining.
local function collectcontent(name,separator) -- no print
    local list  = getnames(name)
    local count = #list
    if count == 0 then
        return getcontent("") -- default buffer
    end
    if count == 1 then
        return getcontent(list[1])
    end
    local pieces = { }
    local filled = 0
    for index=1,count do
        local data = getcontent(list[index])
        if data ~= "" then
            filled = filled + 1
            pieces[filled] = data
        end
    end
    -- the default separator was \r, then \n, and is now os.newline because
    -- buffers can be loaded in other applications
    return concat(pieces,separator or os.newline)
end
196 197
-- Compile and run the collected buffer content as Lua, returning whatever
-- the chunk returns; compilation errors are reported, not raised.
local function loadcontent(name) -- no print
    local code = collectcontent(name,"\n") -- tex likes \n hm, elsewhere \r
    local chunk, message = load(code)
    if not chunk then
        report_buffers("invalid lua code in buffer %a: %s",name,message or "unknown error")
        return
    end
    return chunk()
end
206 207
-- Public Lua interface of the buffer store.
buffers.raw            = getcontent
buffers.erase          = erase
buffers.assign         = assign
buffers.prepend        = prepend
buffers.append         = append
buffers.exists         = exists
buffers.empty          = empty
buffers.getcontent     = getcontent
buffers.getlines       = getlines
buffers.collectcontent = collectcontent
buffers.loadcontent    = loadcontent
218 219
-- the context interface
220 221
-- The context (TeX) interface: \clf_assignbuffer and \clf_erasebuffer.

implement {
    name      = "assignbuffer",
    actions   = assign,
    arguments = { "string", "string", "integer" }
}

implement {
    name      = "erasebuffer",
    actions   = erase,
    arguments = "string"
}
232 233
-- local anything = patterns.anything
234
-- local alwaysmatched = patterns.alwaysmatched
235
-- local utf8character = patterns.utf8character
236
--
237
-- local function countnesting(b,e)
238
-- local n
239
-- local g = P(b) / function() n = n + 1 end
240
-- + P(e) / function() n = n - 1 end
241
-- -- + anything
242
-- + utf8character
243
-- local p = alwaysmatched / function() n = 0 end
244
-- * g^0
245
-- * alwaysmatched / function() return n end
246
-- return p
247
-- end
248 249
-- Shared state for buffer grabbing.
local counters   = { }    -- per-begintag nesting counters (used by the commented grabber below)
local nesting    = 0      -- current nesting depth while grabbing
local autoundent = true   -- strip common leading indentation unless told otherwise
local continue   = false  -- true while a nested grab is being continued
253 254
-- Beware: the first character of bufferdata has to be discarded as it's there to
255
-- prevent gobbling of newlines in the case of nested buffers. The last one is
256
-- a newlinechar and is removed too.
257
--
258
-- An \n is unlikely to show up as \r is the endlinechar but \n is more generic
259
-- for us.
260 261
-- This fits the way we fetch verbatim: the indentation before the sentinel
262
-- determines the stripping.
263 264
-- str = [[
265
-- test test test test test test test
266
-- test test test test test test test
267
-- test test test test test test test
268
--
269
-- test test test test test test test
270
-- test test test test test test test
271
-- test test test test test test test
272
-- ]]
273 274
-- local function undent(str)
275
-- local margin = match(str,"[\n\r]( +)[\n\r]*$") or ""
276
-- local indent = #margin
277
-- if indent > 0 then
278
-- local lines = splitlines(str)
279
-- local ok = true
280
-- local pattern = "^" .. margin
281
-- for i=1,#lines do
282
-- local l = lines[i]
283
-- if find(l,pattern) then
284
-- lines[i] = sub(l,indent+1)
285
-- else
286
-- ok = false
287
-- break
288
-- end
289
-- end
290
-- if ok then
291
-- return concat(lines,"\n")
292
-- end
293
-- end
294
-- return str
295
-- end
296 297
-- how about tabs
298 299
-- Indentation stripping machinery. The patterns below communicate through
-- the nofspaces upvalue: matching a line's leading spaces lowers nofspaces
-- to the smallest margin seen so far. Do not reorder; the side effect is
-- the point.
local strippers  = { }  -- cache of Cs strippers, keyed by margin width
local nofspaces  = 0

-- A non-empty line: record its leading-space count, then swallow the rest.
local normalline = space^0 / function(s) local n = #s if n < nofspaces then nofspaces = n end end
                 * whatever

-- Scan a whole string, line by line, accumulating the minimum margin.
local getmargin = (emptyline + normalline)^1
306 307
-- Strip the common leading indentation from all lines of str. The margin
-- is found by running getmargin (which lowers the nofspaces upvalue as a
-- side effect); a per-width stripper lpeg is then built and cached.
local function undent(str) -- new version, needs testing: todo: not always needed, like in xtables
    nofspaces = #str -- upper bound; getmargin lowers this while matching
    local margin = lpegmatch(getmargin,str)
    -- unchanged nofspaces means no non-empty line was seen; 0 means no margin
    if nofspaces == #str or nofspaces == 0 then
        return str
    end
    local stripper = strippers[nofspaces]
    if not stripper then
        -- drop up to nofspaces leading spaces from each line; keep empty lines
        stripper = Cs(((space^-nofspaces)/"" * whatever + emptyline)^1)
        strippers[nofspaces] = stripper
    end
    return lpegmatch(stripper,str) or str
end

buffers.undent = undent
322 323
-- function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes,doundent) -- maybe move \\ to call
324
-- local dn = getcontent(name)
325
-- if dn == "" then
326
-- nesting = 0
327
-- continue = false
328
-- end
329
-- if trace_grab then
330
-- if #bufferdata > 30 then
331
-- report_grabbing("%s => |%s..%s|",name,sub(bufferdata,1,10),sub(bufferdata,-10,#bufferdata))
332
-- else
333
-- report_grabbing("%s => |%s|",name,bufferdata)
334
-- end
335
-- end
336
-- local counter = counters[begintag]
337
-- if not counter then
338
-- counter = countnesting(begintag,endtag)
339
-- counters[begintag] = counter
340
-- end
341
-- nesting = nesting + lpegmatch(counter,bufferdata)
342
-- local more = nesting > 0
343
-- if more then
344
-- dn = dn .. sub(bufferdata,2,-1) .. endtag
345
-- nesting = nesting - 1
346
-- continue = true
347
-- else
348
-- if continue then
349
-- dn = dn .. sub(bufferdata,2,-2) -- no \r, \n is more generic
350
-- elseif dn == "" then
351
-- dn = sub(bufferdata,2,-2)
352
-- else
353
-- dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic
354
-- end
355
-- local last = sub(dn,-1)
356
-- if last == "\n" or last == "\r" then -- \n is unlikely as \r is the endlinechar
357
-- dn = sub(dn,1,-2)
358
-- end
359
-- if doundent or (autoundent and doundent == nil) then
360
-- dn = undent(dn)
361
-- end
362
-- end
363
-- assign(name,dn,catcodes)
364
-- commands.doifelse(more)
365
-- end
366 367
-- Memoizing map from a string to its table of (utf) characters; entries
-- are built on first access via the __index handler.
local split = table.setmetatableindex(function(store,str)
    local characters = totable(str)
    store[str] = characters
    return characters
end)
372 373
-- Map of TeX command codes back to their conventional source characters,
-- used when reconstructing grabbed tokens (presumably catcode-like codes:
-- escape, grouping, math, alignment, etc. -- TODO confirm against the
-- engine's command-code table).
local tochar = {
    [ 0] = "\\",
    [ 1] = "{",
    [ 2] = "}",
    [ 3] = "$",
    [ 4] = "&",
    [ 5] = "\n",
    [ 6] = "#",
    [ 7] = "^",
    [ 8] = "_",
    [10] = " ",
    [14] = "%",
}
386 387
-- Use the token-code scanner path only when the engine provides
-- scantokencode; normalize to a real boolean. (The original declared
-- 'local experiment = false' and immediately shadowed it with
-- 'scantokencode and true', which yields nil -- not false -- when the
-- scanner is missing; both are falsy, so behavior is unchanged.)
local experiment = scantokencode and true or false
389 390
-- Grab raw input between the start and stop markers, honoring nested
-- start/stop pairs, and return it as a single string with leading/trailing
-- whitespace-only lines trimmed. Characters come from scancode (or the
-- experimental scantokencode); non-character tokens are handled via
-- gettoken and, in the experiment, turned back into source text.
local function pickup(start,stop)
    local stoplist    = split[stop]  -- totable(stop)
    local stoplength  = #stoplist
    local stoplast    = stoplist[stoplength]
    local startlist   = split[start] -- totable(start)
    local startlength = #startlist
    local startlast   = startlist[startlength]
    local list        = { }          -- collected characters / csname pieces
    local size        = 0
    local depth       = 0            -- nesting of start..stop pairs
 -- local done        = 32
    local scancode = experiment and scantokencode or scancode
    while true do -- or use depth
        local char = scancode()
        if char then
         -- if char < done then
         --     -- we skip leading control characters so that we can use them to
         --     -- obey spaces (a dirty trick)
         -- else
         --     done = 0
                char = utfchar(char)
                size = size + 1
                list[size] = char
                -- tail-compare against the stop marker
                if char == stoplast and size >= stoplength then
                    local done = true
                    local last = size
                    for i=stoplength,1,-1 do
                        if stoplist[i] ~= list[last] then
                            done = false
                            break
                        end
                        last = last - 1
                    end
                    if done then
                        if depth > 0 then
                            depth = depth - 1
                        else
                            break
                        end
                        char = false -- trick: let's skip the next (start) test
                    end
                end
                -- tail-compare against the start marker (raises nesting)
                if char == startlast and size >= startlength then
                    local done = true
                    local last = size
                    for i=startlength,1,-1 do
                        if startlist[i] ~= list[last] then
                            done = false
                            break
                        end
                        last = last - 1
                    end
                    if done then
                        depth = depth + 1
                    end
                end
         -- end
        else
         -- local t = scantoken()
            local t = gettoken()
            if t then
                -- we're skipping leading stuff, like obeyedlines and relaxes
                if experiment and size > 0 then
                    -- we're probably in a macro
                    local char = tochar[getcommand(t)]
                    if char then
                        size = size + 1 ; list[size] = char
                    else
                        local csname = getcsname(t)
                        if csname == stop then
                            -- the stop marker arrived as a control sequence
                            stoplength = 0
                            break
                        else
                            size = size + 1 ; list[size] = "\\"
                            size = size + 1 ; list[size] = csname
                            size = size + 1 ; list[size] = " "
                        end
                    end
                else
                    -- ignore and hope for the best
                end
            else
                break
            end
        end
    end
    -- trim: drop the stop marker, then strip whitespace-only leading lines
    local start = 1
    local stop  = size - stoplength - 1
    -- not good enough: only empty lines, but even then we miss the leading
    -- for verbatim
    --
    -- the next is not yet adapted to the new scanner ... we don't need lpeg here
    --
    for i=start,stop do
        local li = list[i]
        if lpegmatch(blackspace,li) then
            -- keep going
        elseif lpegmatch(eol,li) then
            -- okay
            start = i + 1
        else
            break
        end
    end
    -- and strip trailing whitespace characters
    for i=stop,start,-1 do
        if lpegmatch(whitespace,list[i]) then
            stop = i - 1
        else
            break
        end
    end
    --
    if start <= stop then
        return concat(list,"",start,stop)
    else
        return ""
    end
end
508 509
-- -- lmtx:
510
--
511
-- local function pickup(start,stop)
512
-- local stoplist = split[stop] -- totable(stop)
513
-- local stoplength = #stoplist
514
-- local stoplast = stoplist[stoplength]
515
-- local startlist = split[start] -- totable(start)
516
-- local startlength = #startlist
517
-- local startlast = startlist[startlength]
518
-- local list = { }
519
-- local size = 0
520
-- local depth = 0
521
-- getnextchar() -- we start with a \relax
522
-- while true do -- or use depth
523
-- local char = getnextchar()
524
-- if char then
525
-- size = size + 1
526
-- list[size] = char
527
-- if char == stoplast and size >= stoplength then
528
-- local done = true
529
-- local last = size
530
-- for i=stoplength,1,-1 do
531
-- if stoplist[i] ~= list[last] then
532
-- done = false
533
-- break
534
-- end
535
-- last = last - 1
536
-- end
537
-- if done then
538
-- if depth > 0 then
539
-- depth = depth - 1
540
-- else
541
-- break
542
-- end
543
-- char = false -- trick: let's skip the next (start) test
544
-- end
545
-- end
546
-- if char == startlast and size >= startlength then
547
-- local done = true
548
-- local last = size
549
-- for i=startlength,1,-1 do
550
-- if startlist[i] ~= list[last] then
551
-- done = false
552
-- break
553
-- end
554
-- last = last - 1
555
-- end
556
-- if done then
557
-- depth = depth + 1
558
-- end
559
-- end
560
-- else
561
-- local t = gettoken()
562
-- if t then
563
-- -- we're skipping leading stuff, like obeyedlines and relaxes
564
-- if experiment and size > 0 then
565
-- -- we're probably in a macro
566
-- local char = tochar[getcommand(t)]
567
-- if char then
568
-- size = size + 1 ; list[size] = char
569
-- else
570
-- local csname = getcsname(t)
571
-- if csname == stop then
572
-- stoplength = 0
573
-- break
574
-- else
575
-- size = size + 1 ; list[size] = "\\"
576
-- size = size + 1 ; list[size] = csname
577
-- size = size + 1 ; list[size] = " "
578
-- end
579
-- end
580
-- else
581
-- -- ignore and hope for the best
582
-- end
583
-- else
584
-- break
585
-- end
586
-- end
587
-- end
588
-- local start = 1
589
-- local stop = size - stoplength - 1
590
-- -- not good enough: only empty lines, but even then we miss the leading
591
-- -- for verbatim
592
-- --
593
-- -- the next is not yet adapted to the new scanner ... we don't need lpeg here
594
-- --
595
-- for i=start,stop do
596
-- local li = list[i]
597
-- if lpegmatch(blackspace,li) then
598
-- -- keep going
599
-- elseif lpegmatch(eol,li) then
600
-- -- okay
601
-- start = i + 1
602
-- else
603
-- break
604
-- end
605
-- end
606
-- for i=stop,start,-1 do
607
-- if lpegmatch(whitespace,list[i]) then
608
-- stop = i - 1
609
-- else
610
-- break
611
-- end
612
-- end
613
-- --
614
-- if start <= stop then
615
-- return concat(list,"",start,stop)
616
-- else
617
-- return ""
618
-- end
619
-- end
620 621
-- function buffers.pickup(name,start,stop,finish,catcodes,doundent)
622
-- local data = tokens.pickup(start,stop)
623
-- if doundent or (autoundent and doundent == nil) then
624
-- data = buffers.undent(data)
625
-- end
626
-- buffers.assign(name,data,catcodes)
627
-- context(finish)
628
-- end
629 630
-- commands.pickupbuffer = buffers.pickup
631 632
-- Expose the grabber to other token-handling code.
tokens.pickup = pickup
633 634
-- \clf_pickupbuffer: grab everything between start and stop from the TeX
-- input, optionally undent it, store it as a buffer, and hand control back
-- by injecting the finish text. The scanner calls below consume arguments
-- in order, so their sequence must not change.
implement {
    name    = "pickupbuffer",
    actions = function()
        -- let's pickup all here (no arguments)
        local name     = scanstring()
        local start    = scanstring()
        local stop     = scanstring()
        local finish   = scanstring()
        local catcodes = scaninteger()
        local doundent = scanboolean()
        -- could be a scanner:
        local data = pickup(start,stop)
        -- NOTE(review): scanboolean presumably never yields nil, which would
        -- make the 'doundent == nil' autoundent branch dead -- confirm.
        if doundent or (autoundent and doundent == nil) then
            data = undent(data)
        end
        buffers.assign(name,data,catcodes)
     -- context[finish]()
        context(finish)
    end
}
654 655
-- Write the collected content of the listed buffers to a file. When only
-- one of list/name is given it doubles as the other; prefix == yes puts
-- the jobname in front and forces a tmp suffix; a non-empty directory is
-- created on demand; option == append appends instead of overwriting.
local function savebuffer(list,name,prefix,option,directory) -- name is optional
    if not list or list == "" then
        list = name
    end
    if not name or name == "" then
        name = list
    end
    local data = collectcontent(list,nil) or ""
    if data == "" then
        data = "empty buffer"
    end
    if prefix == v_yes then
        name = addsuffix(tex.jobname .. "-" .. name,"tmp")
    end
    if directory ~= "" and dir.makedirs(directory) then
        name = file.join(directory,name)
    end
    io.savedata(name,replacenewlines(data),"\n",option == v_append)
end
674 675
-- \clf_savebuffer: list, name, prefix, option, directory (all strings).
implement {
    name      = "savebuffer",
    actions   = savebuffer,
    arguments = "5 strings",
}
680 681
-- we can consider adding a size to avoid unlikely clashes
682 683
-- we can consider adding a size to avoid unlikely clashes

-- Lazy job-dataset snapshots used by runbuffer to decide whether a buffer
-- needs reprocessing across runs.
local olddata   = nil
local newdata   = nil
local getrunner = sandbox.getrunner

-- Sandboxed "context" runner used to typeset saved buffers externally.
-- NOTE(review): the trailing space in "--jit " keeps the options separated
-- in the assembled command line.
local runner = sandbox.registerrunner {
    name     = "run buffer",
    program  = "context",
    method   = "execute",
    template = (jit and "--jit " or "") .. "--purgeall %?path: --path=%path% ?% %filename%",
    reporter = report_typeset,
    checkers = {
        filename = "readable",
    }
}
697 698
-- Typeset the named buffer(s) in an external ConTeXt run and return the
-- result filename. Content is hashed (md5) so unchanged buffers are not
-- reprocessed; bookkeeping lives in the "buffers"/"runners" job dataset,
-- and intermediate files are registered as temp files for cleanup.
local function runbuffer(name,encapsulate,runnername,suffixes)
    if not runnername or runnername == "" then
        runnername = "run buffer"
    end
    -- normalize the requested result suffix(es); default is pdf
    local suffix = "pdf"
    if type(suffixes) == "table" then
        suffix = suffixes[1]
    elseif type(suffixes) == "string" and suffixes ~= "" then
        suffix   = suffixes
        suffixes = { suffix }
    else
        suffixes = { suffix }
    end
    local runner = getrunner(runnername)
    if not runner then
        report_typeset("unknown runner %a",runnername)
        return
    end
    -- first call: load last run's bookkeeping and mark its files as temp
    if not olddata then
        olddata = getdata("buffers","runners") or { }
        local suffixes = olddata.suffixes
        local hashes   = olddata.hashes
        if hashes and suffixes then
            for k, hash in next, hashes do
                for h, v in next, hash do
                    for s, v in next, suffixes do
                        local tmp = addsuffix(h,s)
                     -- report_typeset("mark for deletion: %s",tmp)
                        registertempfile(tmp)
                    end
                end
            end
        end
    end
    -- first call: start this run's bookkeeping and persist it in the job data
    if not newdata then
        newdata = {
            version  = environment.version,
            suffixes = { },
            hashes   = { },
        }
        setdata {
            name = "buffers",
            tag  = "runners",
            data = newdata,
        }
    end
    local oldhashes = olddata.hashes or { }
    local newhashes = newdata.hashes or { }
    local old = oldhashes[suffix]
    local new = newhashes[suffix]
    if not old then
        old = { }
        oldhashes[suffix] = old
        -- NOTE(review): this loop iterates the table created on the line
        -- above, so it never runs; it looks like it was meant to walk a
        -- previous run's hashes -- confirm against upstream before changing.
        for hash, n in next, old do
            local tag = formatters["%s-t-b-%s"](tex.jobname,hash)
            local tmp = addsuffix(tag,"tmp")
         -- report_typeset("mark for deletion: %s",tmp)
            registertempfile(tmp) -- to be sure
        end
    end
    if not new then
        new = { }
        newhashes[suffix] = new
    end
    local names   = getnames(name)
    local content = collectcontent(names,nil) or ""
    if content == "" then
        content = "empty buffer"
    end
    if encapsulate then
        content = formatters["\\starttext\n%s\n\\stoptext\n"](content)
    end
    --
    local hash = md5hex(content)
    local tag  = formatters["%s-t-b-%s"](nameonly(tex.jobname),hash) -- make sure we run on the local path
    --
    local filename   = addsuffix(tag,"tmp")
    local resultname = addsuffix(tag,suffix)
    --
    if new[tag] then
        -- done
    elseif not old[tag] or olddata.version ~= newdata.version or not isfile(resultname) then
        if trace_run then
            report_typeset("changes in %a, processing forced",name)
        end
        savedata(filename,content)
        report_typeset("processing saved buffer %a\n",filename)
        runner {
            filename = filename,
            path     = environment.arguments.path, -- maybe take all set paths
        }
    end
    new[tag] = (new[tag] or 0) + 1
    -- NOTE(review): this report is unconditional, so it also fires when the
    -- buffer was in fact just processed above -- confirm intent.
    report_typeset("no changes in %a, processing skipped",name)
    registertempfile(filename)
 -- report_typeset("mark for persistence: %s",filename)
    -- keep all requested result files across the cleanup
    for i=1,#suffixes do
        local suffix = suffixes[i]
        newdata.suffixes[suffix] = true
        local tmp = addsuffix(tag,suffix)
     -- report_typeset("mark for persistance: %s",tmp)
        registertempfile(tmp,nil,true)
    end
    --
    return resultname -- first result
end
804 805
local f_getbuffer = formatters["buffer.%s"]

-- Feed the named buffer's content back into TeX as a pseudo-file
-- ("buffer.<name>"); empty buffers are skipped.
local function getbuffer(name)
    local str = getcontent(name)
    if str ~= "" then
     -- characters.showstring(str)
        ctx_viafile(str,f_getbuffer(validstring(name,"noname")))
    end
end
814 815
-- Feed the buffer through the mkvi macro preprocessor before injecting it
-- as the pseudo-file "buffer.<name>.mkiv".
local function getbuffermkvi(name) -- rather direct !
    ctx_viafile(resolvers.macros.preprocessed(getcontent(name)),formatters["buffer.%s.mkiv"](validstring(name,"noname")))
end
818 819
-- Flush a buffer into TeX under the catcode regime it was stored with
-- (txtcatcodes stays txt, anything else becomes ctxcatcodes); the table
-- is pushed/popped around the flush.
local function gettexbuffer(name)
    local buffer = name and cache[name]
    if buffer and buffer.data ~= "" then
        ctx_pushcatcodetable()
        if buffer.catcodes == txtcatcodes then
            ctx_setcatcodetable(txtcatcodes)
        else
            ctx_setcatcodetable(ctxcatcodes)
        end
        -- context(function() ctx_viafile(buffer.data) end)
        ctx_getbuffer { name } -- viafile flushes too soon
        ctx_popcatcodetable()
    end
end
833 834
-- Public entry points for flushing and running buffers.
buffers.get          = getbuffer
-- FIX: this export used to read 'buffers.getmkiv = getbuffermkiv' -- the
-- right-hand side was an undefined global (the local is spelled
-- getbuffermkvi, cf. its definition and the "getbuffermkvi" implement
-- below), so the export was silently nil. Upstream ConTeXt exports it as
-- getmkvi; the old key is kept as an alias for backward compatibility.
buffers.getmkvi      = getbuffermkvi
buffers.getmkiv      = getbuffermkvi
buffers.gettexbuffer = gettexbuffer
buffers.run          = runbuffer
838 839
-- One-line scanner interfaces for the getters above.
implement { name = "getbufferctxlua", actions = loadcontent,   arguments = "string" }
implement { name = "getbuffer",       actions = getbuffer,     arguments = "string" }
implement { name = "getbuffermkvi",   actions = getbuffermkvi, arguments = "string" }
implement { name = "gettexbuffer",    actions = gettexbuffer,  arguments = "string" }
843 844
-- Return a buffer's raw content to TeX.
interfaces.implement {
    name      = "getbuffercontent",
    arguments = "string",
    actions   = { getcontent, context },
}

-- Typeset a buffer externally (encapsulated in \starttext..\stoptext) and
-- inject the result filename.
implement {
    name      = "typesetbuffer",
    actions   = { runbuffer, context },
    arguments = { "string", true }
}

-- Same, but without encapsulation; the runner name comes as second string.
implement {
    name      = "runbuffer",
    actions   = { runbuffer, context },
    arguments = { "string", false, "string" }
}

implement {
    name      = "doifelsebuffer",
    actions   = { exists, commands.doifelse },
    arguments = "string"
}

implement {
    name      = "doifelsebufferempty",
    actions   = { empty, commands.doifelse },
    arguments = "string"
}

-- This only used for mp buffers and is a kludge. Don't change the
-- texprint into texsprint as it fails because "p<nl>enddef" becomes
-- "penddef" then.

implement {
    name      = "feedback", -- bad name, maybe rename to injectbuffercontent
    actions   = { collectcontent, ctx_printlines },
    arguments = "string"
}
883 884
-- Wrap the core start/stopbuffer CLD commands so that collecting is
-- switched on around a buffer body; the originals are captured first and
-- then replaced (order matters).
do

    local context = context
    local ctxcore = context.core

    local ctx_startbuffer = ctxcore.startbuffer
    local ctx_stopbuffer  = ctxcore.stopbuffer

    local ctx_startcollecting = context.startcollecting
    local ctx_stopcollecting  = context.stopcollecting

    function ctxcore.startbuffer(...)
        ctx_startcollecting()
        ctx_startbuffer(...)
    end

    function ctxcore.stopbuffer()
        ctx_stopbuffer()
        ctx_stopcollecting()
    end

end
906 907
-- moved here:
908 909
-- Load a sample file (resolved via the usual file lookup) into a buffer on
-- first use, then flush that buffer into TeX.
function buffers.samplefile(name)
    if not exists(name) then
        assign(name,io.loaddata(resolvers.findfile(name)))
    end
    getbuffer(name)
end

implement {
    name      = "samplefile", -- bad name, maybe rename to injectbuffercontent
    actions   = buffers.samplefile,
    arguments = "string"
}
921