-- Module metadata, picked up by the scite-context lexer framework
-- when it inspects loaded lexers.
local info = {
    version   = 1.002,
    comment   = "scintilla lpeg lexer for metafun",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files",
}
8
-- Localize globals for faster access (locals live in VM registers).
local global, string, table, lpeg = _G, string, table, lpeg
local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
local type = type

-- The shared context lexer framework: provides the token constructor,
-- exact keyword matching and a set of common subpatterns.
local lexer = require("scite-context-lexer")
local context = lexer.context
local patterns = context.patterns

local token = lexer.token
local exact_match = lexer.exact_match

-- The lexer object for MetaPost/MetaFun ("mps") sources being built here.
local metafunlexer = lexer.new("mps","scite-context-lexer-mps")
local whitespace = metafunlexer.whitespace
22
-- Keyword lists, populated below from external data files. MetaPost
-- (the base language) and MetaFun (the ConTeXt macro layer) are kept
-- apart so they can be styled differently.
local metapostprimitives = { }
local metapostinternals = { }
local metapostshortcuts = { }
local metapostcommands = { }

local metafuninternals = { }
local metafunshortcuts = { }
local metafuncommands = { }

-- Shortcuts and internals of both layers are lexed identically, so the
-- two layers are merged into one list each.
local mergedshortcuts = { }
local mergedinternals = { }
34
do

    -- Append all entries of one list to another.
    local function appendall(target, source)
        for i=1,#source do
            target[#target+1] = source[i]
        end
    end

    -- Populate the keyword lists from the two data files; a missing
    -- file or missing field just leaves the corresponding list empty.

    local metapostdata = context.loaddefinitions("scite-context-data-metapost")

    if metapostdata then
        metapostprimitives = metapostdata.primitives or { }
        metapostinternals  = metapostdata.internals  or { }
        metapostshortcuts  = metapostdata.shortcuts  or { }
        metapostcommands   = metapostdata.commands   or { }
    end

    local metafundata = context.loaddefinitions("scite-context-data-metafun")

    if metafundata then
        metafuninternals = metafundata.internals or { }
        metafunshortcuts = metafundata.shortcuts or { }
        metafuncommands  = metafundata.commands  or { }
    end

    -- Merge both layers, MetaPost entries first.

    appendall(mergedshortcuts, metapostshortcuts)
    appendall(mergedshortcuts, metafunshortcuts)

    appendall(mergedinternals, metapostinternals)
    appendall(mergedinternals, metafuninternals)

end
69
-- Shared subpatterns from the framework.
local space = patterns.space
local any = patterns.any

local dquote = P('"')
local cstoken = patterns.idtoken
local mptoken = patterns.alpha
local leftbrace = P("{")
local rightbrace = P("}")
local number = patterns.real -- shadowed further on by the number token

-- Characters that may occur in a TeX control sequence name; the
-- \127-\255 range covers extended characters as used in ConTeXt.
local cstokentex = R("az","AZ","\127\255") + S("@!?_")
82
83
local spacing = token(whitespace, space^1)
local rest = token("default", any)
-- A MetaPost comment runs from % to the end of the line.
local comment = token("comment", P("%") * (1-S("\n\r"))^0)
-- NOTE(review): the variable names and the lists look swapped here
-- ('internal' matches the shortcut list, 'shortcut' the internal list),
-- but the pairing of style names to lists is preserved as found --
-- confirm against the styleset before renaming anything.
local internal = token("reserved", exact_match(mergedshortcuts,false))
local shortcut = token("data", exact_match(mergedinternals))

local helper = token("command", exact_match(metafuncommands))
local plain = token("plain", exact_match(metapostcommands))
-- A double quoted string; the quotes and the content get own styles.
local quoted = token("quote", dquote)
             * token("string", P(1-dquote)^0)
             * token("quote", dquote)
-- Embedded TeX material: btex ... etex and verbatimtex ... etex, where
-- the keywords must be delimited by a space or a newline.
local separator = P(" ") + S("\n\r")^1
local btex = (P("btex") + P("verbatimtex")) * separator
local etex = separator * P("etex")
local texstuff = token("quote", btex)
               * token("string", (1-etex)^0)
               * token("quote", etex)
local primitive = token("primitive", exact_match(metapostprimitives))
local identifier = token("default", cstoken^1)
local number = token("number", number) -- intentionally shadows the plain pattern above
local grouping = token("grouping", S("()[]{}"))
-- Suffix parameters (#, #@, @#) as used in vardef bodies.
local suffix = token("number", P("#@") + P("@#") + P("#"))
local special = token("special", P("#@") + P("@#") + S("#()[]{}<>=:\""))
-- A 'texlike' warning token used to be defined here as well; it was dead
-- code, unconditionally shadowed by the richer 'texlike' definition below
-- (whose last alternative contains the very same warning pattern), so it
-- has been removed.
local extra = token("extra", P("+-+") + P("++") + S("`~%^&_-+*/\'|\\"))
109
-- Balanced-brace matcher: a recursive grammar that swallows nested
-- { ... } groups.
local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
-- ConTeXt interfacing macros (\MP.../\mp... followed by a balanced
-- argument) are lexed as embedded material; any other control sequence
-- is flagged as a warning, as raw TeX does not belong in MetaPost code.
local texlike = token("embedded", P("\\") * (P("MP") + P("mp")) * mptoken^1)
              * spacing^0
              * token("grouping", leftbrace)
              * token("default", (nested + (1-rightbrace))^0 )
              * token("grouping", rightbrace)
              + token("warning", P("\\") * cstokentex^1)
117
118
119
-- Embedded Lua: between lua(" and ") the cld child lexer takes over.
local cldlexer = lexer.load("scite-context-lexer-cld","mps-cld")

local startlua = P("lua") * space^0 * P('(') * space^0 * P('"')
local stoplua = P('"') * space^0 * P(')')

local startluacode = token("embedded", startlua)
-- The lookahead (#) lets this lexer style the stop pattern itself after
-- the child lexer has ended.
local stopluacode = #stoplua * token("embedded", stoplua)

lexer.embed_lexer(metafunlexer, cldlexer, startluacode, stopluacode)

-- Calls like lua.mp.foo(...) are shown as embedded code as well.
local luacall = token("embedded",P("lua") * ( P(".") * R("az","AZ","__")^1 )^1)

-- An identifier directly followed by '=' (optionally after spaces) is
-- treated as a key in key/value assignments.
local keyword = token("default", (R("AZ","az","__")^1) * # P(space^0 * P("=")))
133
-- Lexer rules, tried in order: the first match wins, so the more
-- specific categories must come before the generic identifier,
-- number and rest rules. Do not reorder casually.
metafunlexer._rules = {
    { "whitespace", spacing },
    { "comment", comment },
    { "keyword", keyword },
    { "internal", internal },
    { "shortcut", shortcut },
    { "luacall", luacall },
    { "helper", helper },
    { "plain", plain },
    { "primitive", primitive },
    { "texstuff", texstuff },
    { "suffix", suffix },
    { "identifier", identifier },
    { "number", number },
    { "quoted", quoted },

    { "special", special },
    { "texlike", texlike },
    { "extra", extra },
    { "rest", rest },
}
155
-- Use the shared context style set for all token names used above.
metafunlexer._tokenstyles = context.styleset

-- Cheap prefilter for folding: only runs of lowercase letters are
-- considered as potential fold words.
metafunlexer._foldpattern = patterns.lower^2
159
-- Fold points: +1 opens a fold, -1 closes one; the words are grouped
-- by the token class in which they were recognized.
metafunlexer._foldsymbols = {
    _patterns = {
        "[a-z][a-z]+",
    },
    ["plain"] = {
        ["beginfig"]     =  1,
        ["endfig"]       = -1,
        ["beginglyph"]   =  1,
        ["endglyph"]     = -1,
    },
    ["primitive"] = {
        ["def"]          =  1,
        ["vardef"]       =  1,
        ["primarydef"]   =  1,
        ["secondarydef"] =  1,
        ["tertiarydef"]  =  1,
        ["enddef"]       = -1,
        ["if"]           =  1,
        ["fi"]           = -1,
        ["for"]          =  1,
        ["forever"]      =  1,
        ["endfor"]       = -1,
    },
}
186
187
188
-- Hand the configured lexer object back to the framework.
return metafunlexer
190 |