local info = {
    version   = 1.002,
    comment   = "scintilla lpeg lexer for xml",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files",
}

-- Adapted from the regular context pretty printer code (after all, lexing
-- boils down to much the same thing and there are only so many ways to do
-- it). Simplified a bit as we have a different nesting model.

-- todo: parse entities in attributes

local lexer = lexer
local global, string, table, lpeg = _G, string, table, lpeg
local token, style, colors, exact_match, no_style = lexer.token, lexer.style, lexer.colors, lexer.exact_match, lexer.style_nothing
local P, R, S, V, C, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt
local type, setmetatable = type, setmetatable
local match, find = string.match, string.find

module(...)

local xmllexer         = _M

local whitespace       = xmllexer.WHITESPACE -- triggers states

local space            = lexer.space -- S(" \t\n\r\v\f")
local any              = lexer.any -- P(1)

local dquote           = P('"')
local squote           = P("'")
local colon            = P(":")
local semicolon        = P(";")
local equal            = P("=")
local ampersand        = P("&")

local name             = (R("az","AZ","09") + S('_-.'))^1
local openbegin        = P("<")
local openend          = P("</")
local closebegin       = P("/>") + P(">")
local closeend         = P(">")
local opencomment      = P("<!--")
local closecomment     = P("-->")
local openinstruction  = P("<?")
local closeinstruction = P("?>")
local opencdata        = P("<![CDATA[")
local closecdata       = P("]]>")

local entity           = ampersand * (1-semicolon)^1 * semicolon
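-- For illustration: this matches a complete reference like "&amp;" or
-- "&#x00E9;" but not a bare "&amp" that lacks the closing semicolon:
--
--   print(entity:match("&amp;"))  -- 6 (the position right after the match)
--   print(entity:match("&amp"))   -- nil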

local wordpattern = lexer.context.wordpattern
local checkedword = lexer.context.checkedword
local setwordlist = lexer.context.setwordlist
local validwords  = false

-- <?xml version="1.0" encoding="UTF-8" language="uk" ?>
--
-- <?context-xml-directive editor language us ?>

local p_preamble = Cmt(#P("<?xml "), function(input,i,_) -- todo: handle the utf bom
    if i < 10 then -- only act near the start of the document
        validwords = false
        local language = match(input,"^<%?xml[^>]*%?>%s*<%?context%-xml%-directive%s+editor%s+language%s+(..)%s+%?>")
        if not language then
            language = match(input,'^<%?xml[^>]*language=[\"\'](..)[\"\'][^>]*%?>',i)
        end
        if language then
            validwords = setwordlist(language)
        end
    end
    return false
end)
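-- Note that the Cmt above always returns false: the preamble rule never
-- produces a token, it only (as a side effect) sets 'validwords' to the word
-- list for the language announced near the start of the document.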

local p_word =
    Cmt(wordpattern, function(_,i,s)
        if validwords then
            return checkedword(validwords,s,i)
        else
            return true, { "text", i }
        end
    end)
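-- So with an active word list the helper decides per word how it is styled
-- (presumably flagging words that fail the spell check), while without one
-- every word simply becomes a "text" token ending at position i.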

local p_rest =
    token("default", any)

local p_text =
    token("default", (1-S("<>&")-space)^1)

local p_spacing =
    token(whitespace, space^1)

local p_optionalwhitespace =
    p_spacing^0

local p_localspacing =
    token("default", space^1)

-- Because we want differently colored open and close markers we need an embedded
-- lexer (whitespace trigger). What is actually needed is that scintilla applies
-- the current whitespace style; even using different style keys is not robust,
-- as they can be shared. I'll fix this in the main lexer code.

local p_dstring =
    token("quote",dquote)
  * token("string",(1-dquote)^0)        -- different from context
  * token("quote",dquote)

local p_sstring =
    token("quote",squote)
  * token("string",(1-squote)^0)        -- different from context
  * token("quote",squote)

-- local p_comment =
--     token("command",opencomment)
--   * token("comment",(1-closecomment)^0) -- different from context
--   * token("command",closecomment)

-- local p_cdata =
--     token("command",opencdata)
--   * token("comment",(1-closecdata)^0)   -- different from context
--   * token("command",closecdata)

local commentlexer = lexer.load("scite-context-lexer-xml-comment")
local cdatalexer   = lexer.load("scite-context-lexer-xml-cdata")

lexer.embed_lexer(xmllexer, commentlexer, token("command",opencomment), token("command",closecomment))
lexer.embed_lexer(xmllexer, cdatalexer,   token("command",opencdata),   token("command",closecdata))

-- maybe treat cdata as just text (then we don't need that extra lexer as we're
-- left with only the comment one)

local p_name =
    token("plain",name)
  * (
        token("default",colon)
      * token("keyword",name)
    )^1
  + token("keyword",name)

local p_key = p_name

local p_attributes = (
    p_optionalwhitespace
  * p_key
  * p_optionalwhitespace
  * token("plain",equal)
  * p_optionalwhitespace
  * (p_dstring + p_sstring)
  * p_optionalwhitespace
)^0
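-- For illustration: in ' foo:bar = "baz" ' the key follows the p_name scheme
-- above, the equal sign is styled "plain" and the value becomes two "quote"
-- delimiters around a "string" body.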

local p_open =
    token("keyword",openbegin)
  * p_name
  * p_optionalwhitespace
  * p_attributes
  * token("keyword",closebegin)

local p_close =
    token("keyword",openend)
  * p_name
  * p_optionalwhitespace
  * token("keyword",closeend)

local p_entity =
    token("constant",entity)

local p_instruction =
    token("command",openinstruction * P("xml"))
  * p_optionalwhitespace
  * p_attributes
  * p_optionalwhitespace
  * token("command",closeinstruction)
  + token("command",openinstruction * name)
  * token("default",(1-closeinstruction)^1)
  * token("command",closeinstruction)

_rules = {
    { "whitespace",  p_spacing     },
    { "preamble",    p_preamble    },
    { "word",        p_word        },
--  { "text",        p_text        },
--  { "comment",     p_comment     },
--  { "cdata",       p_cdata       },
    { "instruction", p_instruction },
    { "close",       p_close       },
    { "open",        p_open        },
    { "entity",      p_entity      },
    { "rest",        p_rest        },
}

_tokenstyles = lexer.context.styleset

_foldsymbols = { -- somehow doesn't work yet
    _patterns = {
        "[<>]",
    },
    ["keyword"] = {
        ["<"] = 1, [">"] = -1,
    },
}
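-- Here each "<" in a "keyword" styled token is meant to raise the fold level
-- and each ">" to lower it again, but as an open tag contains one of each the
-- levels probably cancel out on one line, which may be why it fails.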