-- buff-ini.lua : size: 26 Kb : last modification: 2020-07-01 14:35

if not modules then modules = { } end modules ['buff-ini'] = {
    version   = 1.001,
    comment   = "companion to buff-ini.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

local concat = table.concat
local type, next, load = type, next, load
local sub, format = string.sub, string.format
local splitlines, validstring, replacenewlines = string.splitlines, string.valid, string.replacenewlines
local P, Cs, patterns, lpegmatch = lpeg.P, lpeg.Cs, lpeg.patterns, lpeg.match
local utfchar  = utf.char
local nameonly = file.nameonly
local totable  = string.totable
local md5hex   = md5.hex
local isfile   = lfs.isfile
local savedata = io.savedata

local trace_run       = false  trackers.register("buffers.run",       function(v) trace_run       = v end)
local trace_grab      = false  trackers.register("buffers.grab",      function(v) trace_grab      = v end)
local trace_visualize = false  trackers.register("buffers.visualize", function(v) trace_visualize = v end)

local report_buffers  = logs.reporter("buffers","usage")
local report_typeset  = logs.reporter("buffers","typeset")
----- report_grabbing = logs.reporter("buffers","grabbing")

local context         = context
local commands        = commands

local implement       = interfaces.implement

local scanners        = tokens.scanners
local scanstring      = scanners.string
local scaninteger     = scanners.integer
local scanboolean     = scanners.boolean
local scancode        = scanners.code
local scantokencode   = scanners.tokencode
----- scantoken       = scanners.token

local getters         = tokens.getters
local gettoken        = getters.token

local getcommand      = token.get_command
local getcsname       = token.get_csname
local getnextchar     = token.get_next_char

local variables         = interfaces.variables
local settings_to_array = utilities.parsers.settings_to_array
local formatters        = string.formatters
local addsuffix         = file.addsuffix
local replacesuffix     = file.replacesuffix

local registertempfile = luatex.registertempfile

local v_yes      = variables.yes

local eol        = patterns.eol
local space      = patterns.space
local whitespace = patterns.whitespace
local blackspace = whitespace - eol
local whatever   = (1-eol)^1 * eol^0
local emptyline  = space^0 * eol

local catcodenumbers = catcodes.numbers

local ctxcatcodes = catcodenumbers.ctxcatcodes
local txtcatcodes = catcodenumbers.txtcatcodes

local setdata = job.datasets.setdata
local getdata = job.datasets.getdata

local ctx_viafile          = context.viafile
local ctx_getbuffer        = context.getbuffer
local ctx_pushcatcodetable = context.pushcatcodetable
local ctx_popcatcodetable  = context.popcatcodetable
local ctx_setcatcodetable  = context.setcatcodetable
local ctx_printlines       = context.printlines

buffers       = buffers or { }
local buffers = buffers

local cache = { }

local function erase(name)
    cache[name] = nil
end

local function assign(name,str,catcodes)
    cache[name] = {
        data     = str,
        catcodes = catcodes,
        typeset  = false,
    }
end

local function combine(name,str,prepend)
    local buffer = cache[name]
    if buffer then
        buffer.data    = prepend and (str .. buffer.data) or (buffer.data .. str)
        buffer.typeset = false
    else
        cache[name] = {
            data    = str,
            typeset = false,
        }
    end
end

local function prepend(name,str)
    combine(name,str,true)
end

local function append(name,str)
    combine(name,str)
end

local function exists(name)
    return cache[name]
end

local function getcontent(name)
    local buffer = name and cache[name]
    return buffer and buffer.data or ""
end

local function getlines(name)
    local buffer = name and cache[name]
    return buffer and splitlines(buffer.data)
end

local function getnames(name)
    if type(name) == "string" then
        return settings_to_array(name)
    else
        return name
    end
end

local function istypeset(name)
    local names = getnames(name)
    if #names == 0 then
        return false
    end
    for i=1,#names do
        local c = cache[names[i]]
        if c and not c.typeset then
            return false
        end
    end
    return true
end

local function markastypeset(name)
    local names = getnames(name)
    for i=1,#names do
        local c = cache[names[i]]
        if c then
            c.typeset = true
        end
    end
end

local function collectcontent(name,separator) -- no print
    local names  = getnames(name)
    local nnames = #names
    if nnames == 0 then
        return getcontent("") -- default buffer
    elseif nnames == 1 then
        return getcontent(names[1])
    else
        local t = { }
        local n = 0
        for i=1,nnames do
            local c = getcontent(names[i])
            if c ~= "" then
                n = n + 1
                t[n] = c
            end
        end
        -- the default separator was \r, then \n and is now os.newline because buffers
        -- can be loaded in other applications
        return concat(t,separator or os.newline)
    end
end

local function loadcontent(name) -- no print
    local content = collectcontent(name,"\n") -- tex likes \n hm, elsewhere \r
    local ok, err = load(content)
    if ok then
        return ok()
    else
        report_buffers("invalid lua code in buffer %a: %s",name,err or "unknown error")
    end
end

buffers.raw            = getcontent
buffers.erase          = erase
buffers.assign         = assign
buffers.prepend        = prepend
buffers.append         = append
buffers.exists         = exists
buffers.getcontent     = getcontent
buffers.getlines       = getlines
buffers.collectcontent = collectcontent
buffers.loadcontent    = loadcontent
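
-- A minimal usage sketch of the buffer store defined above; the buffer name
-- "demo" is just an illustration, not something the module provides:
--
--   buffers.assign("demo","first line")
--   buffers.append("demo","\nsecond line")
--   buffers.getcontent("demo")   -- "first line\nsecond line"
--   buffers.exists("demo")       -- the cache entry (a table) or nil
--   buffers.erase("demo")
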
-- the context interface

implement {
    name      = "assignbuffer",
    actions   = assign,
    arguments = { "string", "string", "integer" }
}

implement {
    name      = "erasebuffer",
    actions   = erase,
    arguments = "string"
}

-- local anything      = patterns.anything
-- local alwaysmatched = patterns.alwaysmatched
-- local utf8character = patterns.utf8character
--
-- local function countnesting(b,e)
--     local n
--     local g = P(b) / function() n = n + 1 end
--             + P(e) / function() n = n - 1 end
--          -- + anything
--             + utf8character
--     local p = alwaysmatched / function() n = 0 end
--             * g^0
--             * alwaysmatched / function() return n end
--     return p
-- end

local counters   = { }
local nesting    = 0
local autoundent = true
local continue   = false

-- Beware: the first character of bufferdata has to be discarded as it's there to
-- prevent gobbling of newlines in the case of nested buffers. The last one is
-- a newlinechar and is removed too.
--
-- An \n is unlikely to show up as \r is the endlinechar but \n is more generic
-- for us.

-- This fits the way we fetch verbatim: the indentation before the sentinel
-- determines the stripping.

-- str = [[
--     test test test test test test test
--     test test test test test test test
--     test test test test test test test
--
--     test test test test test test test
--     test test test test test test test
--     test test test test test test test
-- ]]

-- local function undent(str)
--     local margin = match(str,"[\n\r]( +)[\n\r]*$") or ""
--     local indent = #margin
--     if indent > 0 then
--         local lines   = splitlines(str)
--         local ok      = true
--         local pattern = "^" .. margin
--         for i=1,#lines do
--             local l = lines[i]
--             if find(l,pattern) then
--                 lines[i] = sub(l,indent+1)
--             else
--                 ok = false
--                 break
--             end
--         end
--         if ok then
--             return concat(lines,"\n")
--         end
--     end
--     return str
-- end

-- how about tabs

local strippers = { }
local nofspaces = 0

local normalline = space^0 / function(s) local n = #s if n < nofspaces then nofspaces = n end end
                 * whatever

local getmargin = (emptyline + normalline)^1

local function undent(str) -- new version, needs testing: todo: not always needed, like in xtables
    nofspaces = #str
    local margin = lpegmatch(getmargin,str)
    if nofspaces == #str or nofspaces == 0 then
        return str
    end
    local stripper = strippers[nofspaces]
    if not stripper then
        stripper = Cs(((space^-nofspaces)/"" * whatever + emptyline)^1)
        strippers[nofspaces] = stripper
    end
    return lpegmatch(stripper,str) or str
end

buffers.undent = undent
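
-- A small illustration of undent (the input is hypothetical): the smallest
-- leading space run found on a non-empty line becomes the margin and is
-- stripped from every line, so
--
--   undent("    one\n      two\n    three\n")
--
-- gives "one\n  two\nthree\n"; lines holding only spaces (or nothing) are
-- kept as-is and don't influence the margin.
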
-- function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes,doundent) -- maybe move \\ to call
--     local dn = getcontent(name)
--     if dn == "" then
--         nesting  = 0
--         continue = false
--     end
--     if trace_grab then
--         if #bufferdata > 30 then
--             report_grabbing("%s => |%s..%s|",name,sub(bufferdata,1,10),sub(bufferdata,-10,#bufferdata))
--         else
--             report_grabbing("%s => |%s|",name,bufferdata)
--         end
--     end
--     local counter = counters[begintag]
--     if not counter then
--         counter = countnesting(begintag,endtag)
--         counters[begintag] = counter
--     end
--     nesting = nesting + lpegmatch(counter,bufferdata)
--     local more = nesting > 0
--     if more then
--         dn = dn .. sub(bufferdata,2,-1) .. endtag
--         nesting = nesting - 1
--         continue = true
--     else
--         if continue then
--             dn = dn .. sub(bufferdata,2,-2) -- no \r, \n is more generic
--         elseif dn == "" then
--             dn = sub(bufferdata,2,-2)
--         else
--             dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic
--         end
--         local last = sub(dn,-1)
--         if last == "\n" or last == "\r" then -- \n is unlikely as \r is the endlinechar
--             dn = sub(dn,1,-2)
--         end
--         if doundent or (autoundent and doundent == nil) then
--             dn = undent(dn)
--         end
--     end
--     assign(name,dn,catcodes)
--     commands.doifelse(more)
-- end

local split = table.setmetatableindex(function(t,k)
    local v = totable(k)
    t[k] = v
    return v
end)
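
-- The split table above memoizes the character-wise split of a string, e.g.
-- split["stopfoo"] yields { "s","t","o","p","f","o","o" } and caches it (the
-- key is just an example), so repeated pickups with the same delimiters don't
-- re-split.
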
local tochar = {
    [ 0] = "\\",
    [ 1] = "{",
    [ 2] = "}",
    [ 3] = "$",
    [ 4] = "&",
    [ 5] = "\n",
    [ 6] = "#",
    [ 7] = "^",
    [ 8] = "_",
    [10] = " ",
    [14] = "%",
}

local experiment = false
local experiment = scantokencode and true

local function pickup(start,stop)
    local stoplist    = split[stop] -- totable(stop)
    local stoplength  = #stoplist
    local stoplast    = stoplist[stoplength]
    local startlist   = split[start] -- totable(start)
    local startlength = #startlist
    local startlast   = startlist[startlength]
    local list        = { }
    local size        = 0
    local depth       = 0
 -- local done        = 32
    local scancode    = experiment and scantokencode or scancode
    while true do -- or use depth
        local char = scancode()
        if char then
         -- if char < done then
         --     -- we skip leading control characters so that we can use them to
         --     -- obey spaces (a dirty trick)
         -- else
         --     done = 0
                char = utfchar(char)
                size = size + 1
                list[size] = char
                if char == stoplast and size >= stoplength then
                    local done = true
                    local last = size
                    for i=stoplength,1,-1 do
                        if stoplist[i] ~= list[last] then
                            done = false
                            break
                        end
                        last = last - 1
                    end
                    if done then
                        if depth > 0 then
                            depth = depth - 1
                        else
                            break
                        end
                        char = false -- trick: let's skip the next (start) test
                    end
                end
                if char == startlast and size >= startlength then
                    local done = true
                    local last = size
                    for i=startlength,1,-1 do
                        if startlist[i] ~= list[last] then
                            done = false
                            break
                        end
                        last = last - 1
                    end
                    if done then
                        depth = depth + 1
                    end
                end
         -- end
        else
         -- local t = scantoken()
            local t = gettoken()
            if t then
                -- we're skipping leading stuff, like obeyedlines and relaxes
                if experiment and size > 0 then
                    -- we're probably in a macro
                    local char = tochar[getcommand(t)]
                    if char then
                        size = size + 1 ; list[size] = char
                    else
                        local csname = getcsname(t)
                        if csname == stop then
                            stoplength = 0
                            break
                        else
                            size = size + 1 ; list[size] = "\\"
                            size = size + 1 ; list[size] = csname
                            size = size + 1 ; list[size] = " "
                        end
                    end
                else
                    -- ignore and hope for the best
                end
            else
                break
            end
        end
    end
    local start = 1
    local stop  = size - stoplength - 1
    -- not good enough: only empty lines, but even then we miss the leading
    -- for verbatim
    --
    -- the next is not yet adapted to the new scanner ... we don't need lpeg here
    --
    for i=start,stop do
        local li = list[i]
        if lpegmatch(blackspace,li) then
            -- keep going
        elseif lpegmatch(eol,li) then
            -- okay
            start = i + 1
        else
            break
        end
    end
    for i=stop,start,-1 do
        if lpegmatch(whitespace,list[i]) then
            stop = i - 1
        else
            break
        end
    end
    --
    if start <= stop then
        return concat(list,"",start,stop)
    else
        return ""
    end
end

-- -- lmtx:
--
-- local function pickup(start,stop)
--     local stoplist    = split[stop] -- totable(stop)
--     local stoplength  = #stoplist
--     local stoplast    = stoplist[stoplength]
--     local startlist   = split[start] -- totable(start)
--     local startlength = #startlist
--     local startlast   = startlist[startlength]
--     local list        = { }
--     local size        = 0
--     local depth       = 0
--     getnextchar() -- we start with a \relax
--     while true do -- or use depth
--         local char = getnextchar()
--         if char then
--             size = size + 1
--             list[size] = char
--             if char == stoplast and size >= stoplength then
--                 local done = true
--                 local last = size
--                 for i=stoplength,1,-1 do
--                     if stoplist[i] ~= list[last] then
--                         done = false
--                         break
--                     end
--                     last = last - 1
--                 end
--                 if done then
--                     if depth > 0 then
--                         depth = depth - 1
--                     else
--                         break
--                     end
--                     char = false -- trick: let's skip the next (start) test
--                 end
--             end
--             if char == startlast and size >= startlength then
--                 local done = true
--                 local last = size
--                 for i=startlength,1,-1 do
--                     if startlist[i] ~= list[last] then
--                         done = false
--                         break
--                     end
--                     last = last - 1
--                 end
--                 if done then
--                     depth = depth + 1
--                 end
--             end
--         else
--             local t = gettoken()
--             if t then
--                 -- we're skipping leading stuff, like obeyedlines and relaxes
--                 if experiment and size > 0 then
--                     -- we're probably in a macro
--                     local char = tochar[getcommand(t)]
--                     if char then
--                         size = size + 1 ; list[size] = char
--                     else
--                         local csname = getcsname(t)
--                         if csname == stop then
--                             stoplength = 0
--                             break
--                         else
--                             size = size + 1 ; list[size] = "\\"
--                             size = size + 1 ; list[size] = csname
--                             size = size + 1 ; list[size] = " "
--                         end
--                     end
--                 else
--                     -- ignore and hope for the best
--                 end
--             else
--                 break
--             end
--         end
--     end
--     local start = 1
--     local stop  = size - stoplength - 1
--     -- not good enough: only empty lines, but even then we miss the leading
--     -- for verbatim
--     --
--     -- the next is not yet adapted to the new scanner ... we don't need lpeg here
--     --
--     for i=start,stop do
--         local li = list[i]
--         if lpegmatch(blackspace,li) then
--             -- keep going
--         elseif lpegmatch(eol,li) then
--             -- okay
--             start = i + 1
--         else
--             break
--         end
--     end
--     for i=stop,start,-1 do
--         if lpegmatch(whitespace,list[i]) then
--             stop = i - 1
--         else
--             break
--         end
--     end
--     --
--     if start <= stop then
--         return concat(list,"",start,stop)
--     else
--         return ""
--     end
-- end

-- function buffers.pickup(name,start,stop,finish,catcodes,doundent)
--     local data = tokens.pickup(start,stop)
--     if doundent or (autoundent and doundent == nil) then
--         data = buffers.undent(data)
--     end
--     buffers.assign(name,data,catcodes)
--     context(finish)
-- end

-- commands.pickupbuffer = buffers.pickup

tokens.pickup = pickup
implement
{
625
name
=
"
pickupbuffer
"
,
626
actions
=
function
(
)
627
-- let's pickup all here (no arguments)
628
local
name
=
scanstring
(
)
629
local
start
=
scanstring
(
)
630
local
stop
=
scanstring
(
)
631
local
finish
=
scanstring
(
)
632
local
catcodes
=
scaninteger
(
)
633
local
doundent
=
scanboolean
(
)
634
-- could be a scanner:
635
local
data
=
pickup
(
start
,
stop
)
636
if
doundent
or
(
autoundent
and
doundent
=
=
nil
)
then
637
data
=
undent
(
data
)
638
end
639
buffers
.
assign
(
name
,
data
,
catcodes
)
640
-- context[finish]()
641
context
(
finish
)
642
end
643
}
644 645
local
function
savebuffer
(
list
,
name
,
prefix
)
-- name is optional
646
if
not
list
or
list
=
=
"
"
then
647
list
=
name
648
end
649
if
not
name
or
name
=
=
"
"
then
650
name
=
list
651
end
652
local
content
=
collectcontent
(
list
,
nil
)
or
"
"
653
if
content
=
=
"
"
then
654
content
=
"
empty buffer
"
655
end
656
if
prefix
=
=
v_yes
then
657
name
=
addsuffix
(
tex
.
jobname
.
.
"
-
"
.
.
name
,
"
tmp
"
)
658
end
659
io
.
savedata
(
name
,
replacenewlines
(
content
)
)
660
end
661 662
implement
{
663
name
=
"
savebuffer
"
,
664
actions
=
savebuffer
,
665
arguments
=
"
3 strings
"
,
666
}
667 668
-- we can consider adding a size to avoid unlikely clashes
669 670
local
olddata
=
nil
671
local
newdata
=
nil
672
local
getrunner
=
sandbox
.
getrunner
673 674
local
runner
=
sandbox
.
registerrunner
{
675
name
=
"
run buffer
"
,
676
program
=
"
context
"
,
677
method
=
"
execute
"
,
678
template
=
jit
and
"
--purgeall --jit %filename%
"
or
"
--purgeall %filename%
"
,
679
reporter
=
report_typeset
,
680
checkers
=
{
681
filename
=
"
readable
"
,
682
}
683
}
684 685
local
function
runbuffer
(
name
,
encapsulate
,
runnername
,
suffixes
)
686
if
not
runnername
or
runnername
=
=
"
"
then
687
runnername
=
"
run buffer
"
688
end
689
local
suffix
=
"
pdf
"
690
if
type
(
suffixes
)
=
=
"
table
"
then
691
suffix
=
suffixes
[
1
]
692
elseif
type
(
suffixes
)
=
=
"
string
"
and
suffixes
~
=
"
"
then
693
suffix
=
suffixes
694
suffixes
=
{
suffix
}
695
else
696
suffixes
=
{
suffix
}
697
end
698
local
runner
=
getrunner
(
runnername
)
699
if
not
runner
then
700
report_typeset
(
"
unknown runner %a
"
,
runnername
)
701
return
702
end
703
if
not
olddata
then
704
olddata
=
getdata
(
"
buffers
"
,
"
runners
"
)
or
{
}
705
local
suffixes
=
olddata
.
suffixes
706
local
hashes
=
olddata
.
hashes
707
if
hashes
and
suffixes
then
708
for
k
,
hash
in
next
,
hashes
do
709
for
h
,
v
in
next
,
hash
do
710
for
s
,
v
in
next
,
suffixes
do
711
local
tmp
=
addsuffix
(
h
,
s
)
712
-- report_typeset("mark for deletion: %s",tmp)
713
registertempfile
(
tmp
)
714
end
715
end
716
end
717
end
718
end
719
if
not
newdata
then
720
newdata
=
{
721
version
=
environment
.
version
,
722
suffixes
=
{
}
,
723
hashes
=
{
}
,
724
}
725
setdata
{
726
name
=
"
buffers
"
,
727
tag
=
"
runners
"
,
728
data
=
newdata
,
729
}
730
end
731
local
oldhashes
=
olddata
.
hashes
or
{
}
732
local
newhashes
=
newdata
.
hashes
or
{
}
733
local
old
=
oldhashes
[
suffix
]
734
local
new
=
newhashes
[
suffix
]
735
if
not
old
then
736
old
=
{
}
737
oldhashes
[
suffix
]
=
old
738
for
hash
,
n
in
next
,
old
do
739
local
tag
=
formatters
[
"
%s-t-b-%s
"
]
(
tex
.
jobname
,
hash
)
740
local
tmp
=
addsuffix
(
tag
,
"
tmp
"
)
741
-- report_typeset("mark for deletion: %s",tmp)
742
registertempfile
(
tmp
)
-- to be sure
743
end
744
end
745
if
not
new
then
746
new
=
{
}
747
newhashes
[
suffix
]
=
new
748
end
749
local
names
=
getnames
(
name
)
750
local
content
=
collectcontent
(
names
,
nil
)
or
"
"
751
if
content
=
=
"
"
then
752
content
=
"
empty buffer
"
753
end
754
if
encapsulate
then
755
content
=
formatters
[
"
\\starttext\n%s\n\\stoptext\n
"
]
(
content
)
756
end
757
--
758
local
hash
=
md5hex
(
content
)
759
local
tag
=
formatters
[
"
%s-t-b-%s
"
]
(
nameonly
(
tex
.
jobname
)
,
hash
)
-- make sure we run on the local path
760
--
761
local
filename
=
addsuffix
(
tag
,
"
tmp
"
)
762
local
resultname
=
addsuffix
(
tag
,
suffix
)
763
--
764
if
new
[
tag
]
then
765
-- done
766
elseif
not
old
[
tag
]
or
olddata
.
version
~
=
newdata
.
version
or
not
isfile
(
resultname
)
then
767
if
trace_run
then
768
report_typeset
(
"
changes in %a, processing forced
"
,
name
)
769
end
770
savedata
(
filename
,
content
)
771
report_typeset
(
"
processing saved buffer %a\n
"
,
filename
)
772
runner
{
filename
=
filename
}
773
end
774
new
[
tag
]
=
(
new
[
tag
]
or
0
)
+
1
775
report_typeset
(
"
no changes in %a, processing skipped
"
,
name
)
776
registertempfile
(
filename
)
777
-- report_typeset("mark for persistence: %s",filename)
778
for
i
=
1
,
#
suffixes
do
779
local
suffix
=
suffixes
[
i
]
780
newdata
.
suffixes
[
suffix
]
=
true
781
local
tmp
=
addsuffix
(
tag
,
suffix
)
782
-- report_typeset("mark for persistance: %s",tmp)
783
registertempfile
(
tmp
,
nil
,
true
)
784
end
785
--
786
return
resultname
-- first result
787
end
788 789
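
-- Sketch of the caching scheme above (the hash value is illustrative only): a
-- buffer in job "myfile" whose content hashes to "3f2a..." is written to
-- "myfile-t-b-3f2a....tmp", typeset to "myfile-t-b-3f2a....pdf" (or another
-- requested suffix), and skipped on a later run when the same hash is seen
-- again, the result file still exists and the environment version is unchanged.
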
local f_getbuffer = formatters["buffer.%s"]

local function getbuffer(name)
    local str = getcontent(name)
    if str ~= "" then
     -- characters.showstring(str)
        ctx_viafile(str,f_getbuffer(validstring(name,"noname")))
    end
end

local function getbuffermkvi(name) -- rather direct !
    ctx_viafile(resolvers.macros.preprocessed(getcontent(name)),formatters["buffer.%s.mkiv"](validstring(name,"noname")))
end

local function gettexbuffer(name)
    local buffer = name and cache[name]
    if buffer and buffer.data ~= "" then
        ctx_pushcatcodetable()
        if buffer.catcodes == txtcatcodes then
            ctx_setcatcodetable(txtcatcodes)
        else
            ctx_setcatcodetable(ctxcatcodes)
        end
     -- context(function() ctx_viafile(buffer.data) end)
        ctx_getbuffer { name } -- viafile flushes too soon
        ctx_popcatcodetable()
    end
end

buffers.get          = getbuffer
buffers.getmkvi      = getbuffermkvi
buffers.gettexbuffer = gettexbuffer
buffers.run          = runbuffer

implement { name = "getbufferctxlua", actions = loadcontent,   arguments = "string" }
implement { name = "getbuffer",       actions = getbuffer,     arguments = "string" }
implement { name = "getbuffermkvi",   actions = getbuffermkvi, arguments = "string" }
implement { name = "gettexbuffer",    actions = gettexbuffer,  arguments = "string" }

interfaces.implement {
    name      = "getbuffercontent",
    arguments = "string",
    actions   = { getcontent, context },
}

implement {
    name      = "typesetbuffer",
    actions   = { runbuffer, context },
    arguments = { "string", true }
}

implement {
    name      = "runbuffer",
    actions   = { runbuffer, context },
    arguments = { "string", false, "string" }
}

implement {
    name      = "doifelsebuffer",
    actions   = { exists, commands.doifelse },
    arguments = "string"
}

-- This is only used for mp buffers and is a kludge. Don't change the
-- texprint into texsprint as it fails because "p<nl>enddef" becomes
-- "penddef" then.

implement {
    name      = "feedback", -- bad name, maybe rename to injectbuffercontent
    actions   = { collectcontent, ctx_printlines },
    arguments = "string"
}

do

    local context = context
    local ctxcore = context.core

    local ctx_startbuffer = ctxcore.startbuffer
    local ctx_stopbuffer  = ctxcore.stopbuffer

    local ctx_startcollecting = context.startcollecting
    local ctx_stopcollecting  = context.stopcollecting

    function ctxcore.startbuffer(...)
        ctx_startcollecting()
        ctx_startbuffer(...)
    end

    function ctxcore.stopbuffer()
        ctx_stopbuffer()
        ctx_stopcollecting()
    end

end

-- moved here:

function buffers.samplefile(name)
    if not buffers.exists(name) then
        buffers.assign(name,io.loaddata(resolvers.findfile(name)))
    end
    buffers.get(name)
end

implement {
    name      = "samplefile", -- bad name, maybe rename to injectbuffercontent
    actions   = buffers.samplefile,
    arguments = "string"
}