summaryrefslogtreecommitdiff
path: root/context/data/scite/context/lexers/scite-context-lexer-json.lua
blob: c648b132ab2f1d2ba147e9f8a1fece839f0a33f7 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
-- Metadata record for this lexer module. The surrounding lexer framework
-- conventionally carries such a table; it is not referenced anywhere in
-- the code visible below.
local info = {
    version   = 1.002,
    comment   = "scintilla lpeg lexer for json",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files",
}

-- Localize lpeg and its constructors for speed and brevity:
-- P = literal/arbitrary match, R = character range, S = character set.
local lpeg = lpeg
local P, R, S = lpeg.P, lpeg.R, lpeg.S

-- Project-local lexer framework (provides shared patterns, token maker,
-- and the lexer registry).
local lexers         = require("scite-context-lexer")

local patterns       = lexers.patterns
local token          = lexers.token

-- Create the lexer object: "json" is the language name, the second
-- argument identifies this lexer file to the framework.
local jsonlexer      = lexers.new("json","scite-context-lexer-json")
local jsonwhitespace = jsonlexer.whitespace -- NOTE(review): appears unused below -- presumably required by the framework's whitespace handling; confirm

-- Low-level character patterns. Some locals (comma, escape, spaces) are
-- not referenced below; they are kept as part of the file's pattern
-- inventory, matching the commented-out entries (single, lineending).
local anything     = patterns.anything
local comma        = P(",")
local colon        = P(":")
local escape       = P("\\")
----- single       = P("'")
local double       = P('"')
local openarray    = P('[')
local closearray   = P(']')
local openhash     = P('{')
local closehash    = P('}')
----- lineending   = S("\n\r")
local space        = S(" \t\n\r\f")
local spaces       = space^1
local operator     = S(':,{}[]')
local fence        = openarray + closearray + openhash + closehash

-- FIX: was S("09","AF","af") -- lpeg.S builds a *set* from a single
-- string (extra arguments are ignored), so that matched only the two
-- literal characters '0' and '9'. Hex digits need ranges (lpeg.R), and
-- a json \u escape carries exactly four of them. Note that this pattern
-- is currently dormant: the active 'content' below uses escape_bs only.
local escape_un    = P("\\u") * R("09","AF","af")^4
local escape_bs    = P("\\") * P(1)
----- content      = (escape_un + escape_bs + (1-double))^0
local content      = (escape_bs + (1-double))^0

-- The json literal keywords.
local reserved     = P("true")
                   + P("false")
                   + P("null")

-- More permissive than strict json (hexadecimal is accepted); the
-- lexer highlights, it does not validate.
local integer      = P("-")^-1 * (patterns.hexadecimal + patterns.decimal)
local float        = patterns.float

-- Token builders. Each t_* pairs a style name with a pattern; the
-- framework turns matches into styled editor runs.

-- A number, with any trailing letters/underscores flagged as an error
-- (e.g. the "abc" in "123abc").
local t_number     = token("number",     float + integer)
                   * (token("error",     R("AZ","az","__")^1))^0

local t_spacing    = token("whitespace", space^1)
-- Optional whitespace styled as default (used between a key and its colon).
local t_optionalws = token("default",    space^1)^0

local t_operator   = token("special",    operator)

-- A quoted string value: delimiters styled as operator, body as string.
local t_string     = token("operator",   double)
                   * token("string",     content)
                   * token("operator",   double)

-- An object key: like a string but the body is styled as text, and the
-- match requires a following colon (possibly after whitespace).
local t_key        = token("operator",   double)
                   * token("text",       content)
                   * token("operator",   double)
                   * t_optionalws
                   * token("operator",   colon)

local t_fences     = token("operator",   fence) -- grouping

-- true / false / null
local t_reserved   = token("primitive",  reserved)

-- Catch-all for anything the rules above did not consume.
local t_rest       = token("default",    anything)

-- Rule list: order is match priority, earlier entries win. Keys (a
-- string followed by a colon) are tried before plain strings; the
-- catch-all rest rule comes last.
jsonlexer.rules = {
    { "whitespace", t_spacing  },
    { "reserved",   t_reserved },
    { "key",        t_key      },
    { "number",     t_number   },
    { "string",     t_string   },
    { "fences",     t_fences   },
    { "operator",   t_operator },
    { "rest",       t_rest     },
}

-- Folding: braces and brackets open (+1) and close (-1) a fold level so
-- objects and arrays can be collapsed in the editor.
jsonlexer.folding = {
    ["{"] = { ["grouping"] =  1 },
    ["}"] = { ["grouping"] = -1 },
    ["["] = { ["grouping"] =  1 },
    ["]"] = { ["grouping"] = -1 },
}

return jsonlexer