-- Module metadata, picked up by the ConTeXt lexer framework.
local info = {
    ["version"]   = 1.002,
    ["comment"]   = "scintilla lpeg lexer for metafun",
    ["author"]    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    ["copyright"] = "PRAGMA ADE / ConTeXt Development Team",
    ["license"]   = "see context related readme files",
}
8
-- Localize frequently used globals for speed and to guard against later
-- shadowing. NOTE(review): 'lpeg' is assumed to be provided as a global by
-- the host editor environment -- confirm it is loaded before this file runs.
local global, string, table, lpeg = _G, string, table, lpeg
local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
local type = type

-- Shared infrastructure used by all ConTeXt SciTE lexers.
local lexers = require("scite-context-lexer")

local patterns = lexers.patterns -- common lpeg building blocks (space, any, ...)
local token = lexers.token -- token constructor: token(stylename, pattern)

-- The lexer object for MetaPost ("mps") sources and its whitespace style name.
local metafunlexer = lexers.new("mps","scite-context-lexer-mps")
local metafunwhitespace = metafunlexer.whitespace
20
-- Word lists, filled from external data files in the do-block below.
local metapostprimitives = { } -- MetaPost language primitives
local metapostinternals = { } -- MetaPost internal quantities
local metapostshortcuts = { } -- MetaPost shortcut definitions
local metapostcommands = { } -- plain MetaPost macros

local metafuninternals = { } -- MetaFun internal quantities
local metafunshortcuts = { } -- MetaFun shortcut definitions
local metafuncommands = { } -- MetaFun macros

-- MetaPost and MetaFun lists combined, used by the token matchers below.
local mergedshortcuts = { }
local mergedinternals = { }
32
do

    -- Append all elements of 'source' to the end of 'target' (in order).
    local function appendall(target, source)
        local m = #target
        for i=1,#source do
            target[m+i] = source[i]
        end
    end

    -- Pull the MetaPost word lists from the shared data file.
    local mpdata = lexers.loaddefinitions("scite-context-data-metapost")

    if mpdata then
        metapostprimitives = mpdata.primitives or { }
        metapostinternals  = mpdata.internals  or { }
        metapostshortcuts  = mpdata.shortcuts  or { }
        metapostcommands   = mpdata.commands   or { }
    end

    -- Pull the MetaFun word lists from the shared data file.
    local fundata = lexers.loaddefinitions("scite-context-data-metafun")

    if fundata then
        metafuninternals = fundata.internals or { }
        metafunshortcuts = fundata.shortcuts or { }
        metafuncommands  = fundata.commands  or { }
    end

    -- Merge: MetaPost entries first, then the MetaFun ones.
    appendall(mergedshortcuts, metapostshortcuts)
    appendall(mergedshortcuts, metafunshortcuts)
    appendall(mergedinternals, metapostinternals)
    appendall(mergedinternals, metafuninternals)

end
67
-- Basic patterns shared by the token definitions below.
local space = patterns.space
local any = patterns.any
local exactmatch = patterns.exactmatch -- whole-word match against a word list

local dquote = P('"')
local cstoken = patterns.idtoken -- identifier characters
local mptoken = patterns.alpha
local leftbrace = P("{")
local rightbrace = P("}")
local number = patterns.real

-- Characters that can make up a TeX control sequence name.
local cstokentex = R("az","AZ","\127\255") + S("@!?_")
80
81
82
-- Token definitions; they are tried in the order given in metafunlexer.rules.

local spacing = token(metafunwhitespace, space^1)

local rest = token("default", any) -- catch-all, must come last
local comment = token("comment", P("%") * (1-S("\n\r"))^0) -- % to end of line
-- NOTE(review): 'internal' matches the merged *shortcuts* list and 'shortcut'
-- matches the merged *internals* list; the names look swapped relative to the
-- lists but this pairing of style names may be intentional -- confirm upstream.
local internal = token("reserved", exactmatch(mergedshortcuts))
local shortcut = token("data", exactmatch(mergedinternals))

local helper = token("command", exactmatch(metafuncommands)) -- MetaFun macros
local plain = token("plain", exactmatch(metapostcommands)) -- plain MetaPost macros
-- Double quoted string: the quotes get the "quote" style, content "string".
local quoted = token("quote", dquote)
             * token("string", P(1-dquote)^0)
             * token("quote", dquote)
-- btex/verbatimtex ... etex embeds TeX material inside MetaPost.
local separator = P(" ") + S("\n\r")^1
local btex = (P("btex") + P("verbatimtex")) * separator
local etex = separator * P("etex")
local texstuff = token("quote", btex)
               * token("string", (1-etex)^0)
               * token("quote", etex)
local primitive = token("primitive", exactmatch(metapostprimitives))
local identifier = token("default", cstoken^1)
local number = token("number", number) -- deliberately shadows the pattern local
local grouping = token("grouping", S("()[]{}"))
local suffix = token("number", P("#@") + P("@#") + P("#")) -- suffix parameters
local special = token("special", P("#@") + P("@#") + S("#()[]{}<>=:\""))
-- NOTE(review): this 'texlike' is dead code -- it is overwritten by the
-- extended definition further down; kept here unchanged for reference.
local texlike = token("warning", P("\\") * cstokentex^1)
local extra = token("extra", P("+-+") + P("++") + S("`~%^&_-+*/\'|\\"))
109
-- Balanced-brace group: { ... } with arbitrary nesting (recursive grammar).
local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
-- \MPxxx{...} / \mpxxx{...} macro calls are styled as embedded TeX material;
-- any other backslashed control sequence falls through to a "warning" token.
local texlike = token("embedded", P("\\") * (P("MP") + P("mp")) * mptoken^1)
              * spacing^0
              * token("grouping", leftbrace)
              * token("default", (nested + (1-rightbrace))^0 )
              * token("grouping", rightbrace)
              + token("warning", P("\\") * cstokentex^1)
117
118
119
-- Child lexer for ConTeXt Lua (cld) code embedded via lua("...") calls.
local cldlexer = lexers.load("scite-context-lexer-cld","mps-cld")

local startlua = P("lua") * space^0 * P('(') * space^0 * P('"')
local stoplua = P('"') * space^0 * P(')')

local startluacode = token("embedded", startlua)
-- The leading lookahead (#) makes the embedded lexer stop *before* the
-- closing sequence so it can be tokenized here as "embedded".
local stopluacode = #stoplua * token("embedded", stoplua)

lexers.embed(metafunlexer, cldlexer, startluacode, stopluacode)

-- Dotted lua calls outside the quoted form, e.g. lua.mp.foo.
local luacall = token("embedded",P("lua") * ( P(".") * R("az","AZ","__")^1 )^1)

-- An identifier immediately followed (lookahead) by optional space and '='.
local keyword = token("default", (R("AZ","az","__")^1) * # P(space^0 * P("=")))
133
-- Ordered rule list: earlier entries win, so more specific matchers
-- (keywords, word lists, embedded TeX/Lua) precede the generic ones
-- and "rest" acts as the final catch-all.
metafunlexer.rules = {
    { "whitespace", spacing },
    { "comment", comment },
    { "keyword", keyword },
    { "internal", internal },
    { "shortcut", shortcut },
    { "luacall", luacall },
    { "helper", helper },
    { "plain", plain },
    { "primitive", primitive },
    { "texstuff", texstuff },
    { "suffix", suffix },
    { "identifier", identifier },
    { "number", number },
    { "quoted", quoted },

    { "special", special },
    { "texlike", texlike },
    { "extra", extra },
    { "rest", rest },
}
155
-- Fold points: each word opens (1) or closes (-1) a fold region, keyed by
-- the word and then by the token style it must have been lexed as.
-- (A stray trailing '|' character after the return statement -- an artifact
-- that would be a syntax error -- has been removed.)
metafunlexer.folding = {
    ["beginfig"] = { ["plain"] = 1 },
    ["endfig"] = { ["plain"] = -1 },
    ["beginglyph"] = { ["plain"] = 1 },
    ["endglyph"] = { ["plain"] = -1 },


    ["def"] = { ["primitive"] = 1 },
    ["vardef"] = { ["primitive"] = 1 },
    ["primarydef"] = { ["primitive"] = 1 },
    ["secondarydef" ] = { ["primitive"] = 1 },
    ["tertiarydef"] = { ["primitive"] = 1 },
    ["enddef"] = { ["primitive"] = -1 },
    ["if"] = { ["primitive"] = 1 },
    ["fi"] = { ["primitive"] = -1 },
    ["for"] = { ["primitive"] = 1 },
    ["forever"] = { ["primitive"] = 1 },
    ["endfor"] = { ["primitive"] = -1 },
}

return metafunlexer