-- scite-context-lexer-json.lua /size: 2914 b    last modification: 2020-07-01 14:35
-- Module metadata (consumed by the ConTeXt lexer framework / for provenance).
local info = {
    version   = 1.002,
    comment   = "scintilla lpeg lexer for json",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files",
}
-- Localize globals and the lpeg constructors (locals are faster and keep
-- the rest of the file independent of the global environment).
local global, string, table, lpeg = _G, string, table, lpeg
local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
local type = type

-- Framework access: the shared context lexer module and its helpers.
local lexer       = require("scite-context-lexer")
local context     = lexer.context
local patterns    = context.patterns

local token       = lexer.token
local exact_match = lexer.exact_match

-- The lexer object this module configures and returns.
local jsonlexer   = lexer.new("json","scite-context-lexer-json")
local whitespace  = jsonlexer.whitespace

-- Raw lpeg building blocks for the JSON grammar.
local anything     = patterns.anything
local comma        = P(",")
local colon        = P(":")
local escape       = P("\\")
----- single       = P("'")
local double       = P('"')
local openarray    = P('[')
local closearray   = P(']')
local openhash     = P('{')
local closehash    = P('}')
----- lineending   = S("\n\r")
local space        = S(" \t\n\r\f")
local spaces       = space^1
local operator     = S(':,{}[]')
local fence        = openarray + closearray + openhash + closehash

-- NOTE: a JSON \u escape carries exactly four hex digits. The digit classes
-- need R (ranges 0-9, A-F, a-f); S would build the literal six-character set
-- {0,9,A,F,a,f}. Currently unused: `content` below relies on escape_bs only.
local hexdigit     = R("09","AF","af")
local escape_un    = P("\\u") * hexdigit * hexdigit * hexdigit * hexdigit
local escape_bs    = P("\\") * P(1)
----- content      = (escape_un + escape_bs + (1-double))^0
local content      = (escape_bs + (1-double))^0

-- JSON literal keywords.
local reserved     = P("true")
                   + P("false")
                   + P("null")

-- Numbers: optional sign, then hex (lenient superset of JSON) or decimal.
local integer      = P("-")^-1 * (patterns.hexadecimal + patterns.decimal)
local float        = patterns.float

-- Token patterns: pair each raw pattern with a style name for the editor.

-- A number; any trailing alphabetic/underscore run is flagged as an error.
local t_number     = token("number", float + integer)
                   * (token("error",R("AZ","az","__")^1))^0

local t_spacing    = token(whitespace, space^1)
local t_optionalws = token("default", space^1)^0

local t_operator   = token("special", operator)

-- A quoted string value.
local t_string     = token("operator",double)
                   * token("string",content)
                   * token("operator",double)

-- A hash key: a quoted string followed (after optional whitespace) by a
-- colon; tried before t_string so keys get the "text" style instead.
local t_key        = token("operator",double)
                   * token("text",content)
                   * token("operator",double)
                   * t_optionalws
                   * token("operator",colon)

local t_fences     = token("operator",fence) -- grouping

local t_reserved   = token("primitive",reserved)

-- Fallback: consume one unit so lexing always makes progress.
local t_rest       = token("default",anything)

-- Rule order matters: earlier rules win, so keys are tried before plain
-- strings and the catch-all "rest" comes last.
jsonlexer._rules = {
    { "whitespace", t_spacing  },
    { "reserved",   t_reserved },
    { "key",        t_key      },
    { "number",     t_number   },
    { "string",     t_string   },
    { "fences",     t_fences   },
    { "operator",   t_operator },
    { "rest",       t_rest     },
}

jsonlexer._tokenstyles = context.styleset

-- Folding is driven by the bracket/brace fences defined above.
jsonlexer._foldpattern = fence

jsonlexer._foldsymbols = {
    _patterns = {
        "{", "}",
        "[", "]",
    },
    ["grouping"] = {
        ["{"] = 1, ["}"] = -1,
        ["["] = 1, ["]"] = -1,
    },
}

return jsonlexer