local info = {
    version   = 1.002,
    comment   = "scintilla lpeg lexer for json",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files",
}

local lpeg = lpeg
local P, R, S = lpeg.P, lpeg.R, lpeg.S

local lexers         = require("scite-context-lexer")

local patterns       = lexers.patterns
local token          = lexers.token

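-- The lexer object for json; the framework also provides a whitespace style,
-- kept in a local here although it is not used further on.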
local jsonlexer      = lexers.new("json","scite-context-lexer-json")
local jsonwhitespace = jsonlexer.whitespace

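-- Single characters and small character classes used to build the token
-- patterns below; the commented lines (single quotes, line endings) are not
-- needed for json.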
local anything     = patterns.anything
local comma        = P(",")
local colon        = P(":")
local escape       = P("\\")
----- single       = P("'")
local double       = P('"')
local openarray    = P('[')
local closearray   = P(']')
local openhash     = P('{')
local closehash    = P('}')
----- lineending   = S("\n\r")
local space        = S(" \t\n\r\f")
local spaces       = space^1
local operator     = S(':,{}[]')
local fence        = openarray + closearray + openhash + closehash

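-- String content: a backslash escape (backslash plus any character) or any
-- character that is not a double quote. The stricter variant with explicit
-- \u escapes is left commented out; the generic escape already covers it.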
local escape_un    = P("\\u") * R("09","AF","af") -- hex digit ranges, so R rather than S
local escape_bs    = P("\\") * P(1)
----- content      = (escape_un + escape_bs + (1-double))^0
local content      = (escape_bs + (1-double))^0

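-- The three json keywords.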
local reserved     = P("true")
                   + P("false")
                   + P("null")

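-- Numbers: an optional minus followed by a hexadecimal or decimal pattern, or
-- a float; more permissive than strict json, which is harmless for highlighting.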
local integer      = P("-")^-1 * (patterns.hexadecimal + patterns.decimal)
local float        = patterns.float

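-- Letters or underscores trailing a number (as in 123abc) are flagged as an
-- error token.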
local t_number     = token("number",     float + integer)
                   * (token("error",     R("AZ","az","__")^1))^0

local t_spacing    = token("whitespace", space^1)
local t_optionalws = token("default",    space^1)^0

local t_operator   = token("special",    operator)

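-- A string: the quotes are styled as operators, the content as a string.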
local t_string     = token("operator",   double)
                   * token("string",     content)
                   * token("operator",   double)

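-- A key: a quoted string followed by optional whitespace and a colon; because
-- the key rule comes before the string rule in the rules table below, keys get
-- the "text" style instead of the "string" style.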
local t_key        = token("operator",   double)
                   * token("text",       content)
                   * token("operator",   double)
                   * t_optionalws
                   * token("operator",   colon)

local t_fences     = token("operator",   fence) -- grouping

local t_reserved   = token("primitive",  reserved)

local t_rest       = token("default",    anything)

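-- The rules are tried in the order listed; "rest" catches anything the other
-- rules do not match.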
jsonlexer.rules = {
    { "whitespace", t_spacing  },
    { "reserved",   t_reserved },
    { "key",        t_key      },
    { "number",     t_number   },
    { "string",     t_string   },
    { "fences",     t_fences   },
    { "operator",   t_operator },
    { "rest",       t_rest     },
}

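-- Folding: an opening brace or bracket raises the fold level by one, a closing
-- one lowers it.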
jsonlexer.folding = {
    ["{"] = { ["grouping"] =  1 },
    ["}"] = { ["grouping"] = -1 },
    ["["] = { ["grouping"] =  1 },
    ["]"] = { ["grouping"] = -1 },
}

return jsonlexer