Diffstat (limited to 'Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua')
-rw-r--r--  Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua | 161
1 file changed, 106 insertions(+), 55 deletions(-)
diff --git a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua
index cbe6d261d66..d67be2cd84a 100644
--- a/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua
+++ b/Master/texmf-dist/context/data/scite/lexers/scite-context-lexer-tex.lua
@@ -24,33 +24,26 @@ local info = {
-- local interface = props["keywordclass.macros.context.en"]
-- local interface = lexer.get_property("keywordclass.macros.context.en","")
- -- it seems that whitespace triggers the lexer when embedding happens, but this
- -- is quite fragile due to duplicate styles .. lexer.WHITESPACE is a number
- -- (initially) ... _NAME vs filename (but we don't want to overwrite files)
-
- -- this lexer does not care about other macro packages (one can of course add a fake
- -- interface but it's not on the agenda)
-
]]--
-if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
-
-local lexer = lexer
local global, string, table, lpeg = _G, string, table, lpeg
-local token, exact_match = lexer.token, lexer.exact_match
local P, R, S, V, C, Cmt, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt, lpeg.Cp, lpeg.Cc, lpeg.Ct
local type, next = type, next
local find, match, lower, upper = string.find, string.match, string.lower, string.upper
--- module(...)
-
-local contextlexer = { _NAME = "tex", _FILENAME = "scite-context-lexer-tex" }
-local whitespace = lexer.WHITESPACE
+local lexer = require("lexer")
local context = lexer.context
+local patterns = context.patterns
+local inform = context.inform
-local cldlexer = lexer.load('scite-context-lexer-cld')
------ cldlexer = lexer.load('scite-context-lexer-lua')
-local mpslexer = lexer.load('scite-context-lexer-mps')
+local token = lexer.token
+local exact_match = lexer.exact_match
+
+local contextlexer = lexer.new("tex","scite-context-lexer-tex")
+local whitespace = contextlexer.whitespace
+
+local cldlexer = lexer.load("scite-context-lexer-cld")
+local mpslexer = lexer.load("scite-context-lexer-mps")
local commands = { en = { } }
local primitives = { }
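This first hunk moves the file to the newer module idiom: the lexer table now comes from require("lexer"), the lexer object is created with lexer.new(name,filename) instead of a bare table carrying _NAME/_FILENAME fields, and whitespace is read from the instance rather than from the global lexer.WHITESPACE number. A minimal sketch of the new setup, assuming the wrapped lexer module from scite-context-lexer is on the Lua path (mylexer is a made-up name):

    local lexer      = require("lexer")
    local context    = lexer.context               -- context-specific helpers
    local mylexer    = lexer.new("tex","scite-context-lexer-tex")
    local whitespace = mylexer.whitespace          -- per-instance, no longer lexer.WHITESPACE
    local childlexer = lexer.load("scite-context-lexer-cld")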
@@ -64,7 +57,9 @@ do -- todo: only once, store in global
local definitions = context.loaddefinitions("scite-context-data-interfaces")
if definitions then
+ local used = { } -- not "list": the loop variable below shadows that name
for interface, list in next, definitions do
+ used[#used+1] = interface
local c = { }
for i=1,#list do
c[list[i]] = true
@@ -79,6 +74,7 @@ do -- todo: only once, store in global
end
commands[interface] = c
end
+ inform("context user interfaces '%s' supported",table.concat(used," "))
end
local definitions = context.loaddefinitions("scite-context-data-context")
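The loop above turns each interface's list of command names into a hash, so membership tests while lexing are constant time. A standalone sketch with hypothetical data (the real lists come from scite-context-data-interfaces):

    local definitions = { en = { "starttext", "stoptext" } }  -- made-up sample
    local commands = { }
    for interface, list in next, definitions do
        local c = { }
        for i=1,#list do
            c[list[i]] = true          -- each list entry becomes a set key
        end
        commands[interface] = c
    end
    assert(commands.en.starttext)      -- O(1) lookup while lexing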
@@ -146,13 +142,16 @@ local validminimum = 3
-- % language=uk
-local knownpreamble = Cmt(#P("% "), function(input,i,_) -- todo : utfbomb
+-- fails (empty loop message) ... latest lpeg issue?
+
+local knownpreamble = Cmt(P("% "), function(input,i,_) -- todo : utfbomb, was #P("% ")
if i < 10 then
validwords, validminimum = false, 3
- local s, e, word = find(input,'^(.+)[\n\r]',i) -- combine with match
+ local s, e, word = find(input,"^(.+)[\n\r]",i) -- combine with match
if word then
local interface = match(word,"interface=([a-z]+)")
- if interface then
+ if interface and #interface == 2 then
+ inform("enabling context user interface '%s'",interface)
currentcommands = commands[interface] or commands.en or { }
end
local language = match(word,"language=([a-z]+)")
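The preamble check only fires near the start of the buffer and scans the first line for interface= and language= keys, as in a first line like "% interface=en language=uk". A standalone sketch of the sniffing logic (the name sniff is made up; the real callback also resets the spell-check state and installs the command table):

    local lpeg = require("lpeg")
    local P, Cmt = lpeg.P, lpeg.Cmt

    local sniff = Cmt(P("% "), function(input,i)
        if i < 10 then
            local line = string.match(input,"^(.-)[\n\r]",i) or string.sub(input,i)
            print("interface:", string.match(line,"interface=([a-z]+)"))
            print("language:",  string.match(line,"language=([a-z]+)"))
        end
        return true
    end)

    lpeg.match(sniff,"% interface=en language=uk\n\\starttext\n")
    -- prints: interface: en / language: uk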
@@ -170,7 +169,7 @@ end)
-- local helpers_hash = { } for i=1,#helpers do helpers_hash [helpers [i]] = true end
-- local primitives_hash = { } for i=1,#primitives do primitives_hash[primitives[i]] = true end
--- local specialword = Ct( P('\\') * Cmt( C(cstoken^1), function(input,i,s)
+-- local specialword = Ct( P("\\") * Cmt( C(cstoken^1), function(input,i,s)
-- if currentcommands[s] then
-- return true, "command", i
-- elseif constants_hash[s] then
@@ -184,7 +183,7 @@ end)
-- end
-- end) )
--- local specialword = P('\\') * Cmt( C(cstoken^1), function(input,i,s)
+-- local specialword = P("\\") * Cmt( C(cstoken^1), function(input,i,s)
-- if currentcommands[s] then
-- return true, { "command", i }
-- elseif constants_hash[s] then
@@ -202,11 +201,11 @@ end)
-- 10pt
-local commentline = P('%') * (1-S("\n\r"))^0
+local commentline = P("%") * (1-S("\n\r"))^0
local endline = S("\n\r")^1
-local space = lexer.space -- S(" \n\r\t\f\v")
-local any = lexer.any
+local space = patterns.space -- S(" \n\r\t\f\v")
+local any = patterns.any
local backslash = P("\\")
local hspace = S(" \t")
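These low-level patterns are plain lpeg. For instance, commentline eats a percent sign and everything up to, but not including, the line break:

    local lpeg = require("lpeg")
    local P, S, C = lpeg.P, lpeg.S, lpeg.C

    local commentline = P("%") * (1-S("\n\r"))^0
    print(lpeg.match(C(commentline),"% a comment\nrest"))  --> % a comment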
@@ -219,7 +218,7 @@ local p_command = backslash * knowncommand
local p_constant = backslash * exact_match(constants)
local p_helper = backslash * exact_match(helpers)
local p_primitive = backslash * exact_match(primitives)
-local p_ifprimitive = P('\\if') * cstoken^1
+local p_ifprimitive = P("\\if") * cstoken^1
local p_csname = backslash * (cstoken^1 + P(1))
local p_grouping = S("{$}")
local p_special = S("#()[]<>=\"")
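p_ifprimitive is a catch-all for \if... conditionals that are not in the primitives list. A quick test, assuming cstoken matches letters (it is defined earlier in the file, outside this diff):

    local lpeg = require("lpeg")
    local P, R, C = lpeg.P, lpeg.R, lpeg.C

    local cstoken       = R("az","AZ")             -- assumption, see note above
    local p_ifprimitive = P("\\if") * cstoken^1
    print(lpeg.match(C(p_ifprimitive),"\\ifdefined\\foo"))  --> \ifdefined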
@@ -299,24 +298,24 @@ local p_invisible = invisibles^1
local spacing = token(whitespace, p_spacing )
-local rest = token('default', p_rest )
-local preamble = token('preamble', p_preamble )
-local comment = token('comment', p_comment )
-local command = token('command', p_command )
-local constant = token('data', p_constant )
-local helper = token('plain', p_helper )
-local primitive = token('primitive', p_primitive )
-local ifprimitive = token('primitive', p_ifprimitive)
-local reserved = token('reserved', p_reserved )
-local csname = token('user', p_csname )
-local grouping = token('grouping', p_grouping )
-local number = token('number', p_number )
- * token('constant', p_unit )
-local special = token('special', p_special )
-local reserved = token('reserved', p_reserved ) -- reserved internal preproc
-local extra = token('extra', p_extra )
-local invisible = token('invisible', p_invisible )
-local text = token('default', p_text )
+local rest = token("default", p_rest )
+local preamble = token("preamble", p_preamble )
+local comment = token("comment", p_comment )
+local command = token("command", p_command )
+local constant = token("data", p_constant )
+local helper = token("plain", p_helper )
+local primitive = token("primitive", p_primitive )
+local ifprimitive = token("primitive", p_ifprimitive)
+local reserved = token("reserved", p_reserved )
+local csname = token("user", p_csname )
+local grouping = token("grouping", p_grouping )
+local number = token("number", p_number )
+ * token("constant", p_unit )
+local special = token("special", p_special )
+local reserved = token("reserved", p_reserved ) -- reserved internal preproc
+local extra = token("extra", p_extra )
+local invisible = token("invisible", p_invisible )
+local text = token("default", p_text )
local word = p_word
----- startluacode = token("grouping", P("\\startluacode"))
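Note how the number rule emits two tokens in sequence, so a dimension like 10pt is styled as a number followed by a constant unit. A rough standalone illustration, with simplified stand-ins for the real p_number and p_unit:

    local lpeg = require("lpeg")
    local P, R, C, Ct = lpeg.P, lpeg.R, lpeg.C, lpeg.Ct

    local p_number = R("09")^1 * (P(".") * R("09")^1)^-1   -- simplified
    local p_unit   = P("pt") + P("bp") + P("cm") + P("mm") + P("sp")

    local t = lpeg.match(Ct(C(p_number) * C(p_unit)),"10pt")
    print(t[1], t[2])  --> 10  pt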
@@ -390,18 +389,21 @@ contextlexer._reset_parser = function()
end
local luaenvironment = P("lua") * (P("setups") + P("code") + P(true))
+ + P("ctxfunction") * (P("definition") + P(true))
local inlinelua = P("\\") * (
- P("ctx") * ( P("lua") + P("command") + P("late") * (P("lua") + P("command")) )
- + P("cld") * ( P("command") + P("context") )
+ P("ctx") * (P("lua") + P("command") + P("late") * (P("lua") + P("command")) + P("function"))
+ + P("cld") * (P("command") + P("context"))
+ P("luaexpr")
+ (P("direct") + P("late")) * P("lua")
)
local startlua = P("\\start") * Cmt(luaenvironment,startdisplaylua)
+ + P("<?lua") * Cmt(P(true),startdisplaylua)
+ inlinelua * space^0 * ( Cmt(P("{"),startinlinelua) )
local stoplua = P("\\stop") * Cmt(luaenvironment,stopdisplaylua)
+ + P("?>") * Cmt(P(true),stopdisplaylua)
+ Cmt(P("{"),stopinlinelua_b)
+ Cmt(P("}"),stopinlinelua_e)
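With the extended patterns, <?lua ... ?> blocks and the new \ctxfunction / \startctxfunctiondefinition forms now switch into the embedded Lua lexer alongside \startluacode and friends. A quick match test for the start triggers (the Cmt bookkeeping that tracks brace nesting is omitted here):

    local lpeg = require("lpeg")
    local P = lpeg.P

    local luaenvironment = P("lua") * (P("setups") + P("code") + P(true))
                         + P("ctxfunction") * (P("definition") + P(true))
    local startlua       = P("\\start") * luaenvironment + P("<?lua")

    for _, s in ipairs { "\\startluacode", "\\startctxfunctiondefinition", "<?lua i=1 ?>" } do
        print(s, lpeg.match(startlua,s) ~= nil)   --> true for all three
    end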
@@ -413,7 +415,7 @@ local metafuncall = ( P("reusable") + P("usable") + P("unique") + P("
+ P("MPpositiongraphic")
local metafunenvironment = metafuncall -- ( P("use") + P("reusable") + P("unique") ) * ("MPgraphic")
- + P("MP") * ( P("code")+ P("page") + P("inclusions") + P("initializations") + P("definitions") + P("extensions") + P("graphic") )
+ + P("MP") * ( P("code")+ P("page") + P("inclusions") + P("initializations") + P("definitions") + P("extensions") + P("graphic") + P("calculation") )
local startmetafun = P("\\start") * metafunenvironment
local stopmetafun = P("\\stop") * metafunenvironment -- todo match start
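With "calculation" added to the environment list, \startMPcalculation ... \stopMPcalculation is now recognized and its body is handed to the MetaPost child lexer. A quick check of the start pattern:

    local lpeg = require("lpeg")
    local P = lpeg.P

    local metafunenvironment = P("MP") * ( P("code") + P("page") + P("inclusions")
        + P("initializations") + P("definitions") + P("extensions")
        + P("graphic") + P("calculation") )
    print(lpeg.match(P("\\start") * metafunenvironment,"\\startMPcalculation") ~= nil)  --> true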
@@ -432,9 +434,6 @@ local callers = token("embedded", P("\\") * metafuncall) * metafu
lexer.embed_lexer(contextlexer, cldlexer, startluacode, stopluacode)
lexer.embed_lexer(contextlexer, mpslexer, startmetafuncode, stopmetafuncode)
--- Watch the text grabber, after all, we're talking mostly of text (beware,
--- no punctuation here as it can be special. We might go for utf here.
-
contextlexer._rules = {
{ "whitespace", spacing },
{ "preamble", preamble },
@@ -458,11 +457,61 @@ contextlexer._rules = {
{ "rest", rest },
}
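Rule order matters here: the lexer tries the rules top to bottom and the first pattern that matches wins, which is why the specific classes come before the generic "rest" catch-all. A toy sketch of that dispatch idea (not the real lexer loop):

    local lpeg = require("lpeg")
    local P, S = lpeg.P, lpeg.S

    local function firstmatch(rules,input,position)
        for i=1,#rules do
            local name, pattern = rules[i][1], rules[i][2]
            local e = lpeg.match(pattern,input,position)
            if e then
                return name, e     -- token class and position after the match
            end
        end
    end

    local rules = { { "comment", P("%") * (1-S("\n\r"))^0 } }
    print(firstmatch(rules,"% hi\n",1))   --> comment  5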
-contextlexer._tokenstyles = context.styleset
--- contextlexer._tokenstyles = context.stylesetcopy() -- experiment
+-- Watch the text grabber, after all, we're talking mostly of text (beware,
+-- no punctuation here as it can be special). We might go for utf here.
+
+local web = lexer.loadluafile("scite-context-lexer-web-snippets")
+
+if web then
+
+ lexer.inform("supporting web snippets in tex lexer")
+
+ contextlexer._rules_web = {
+ { "whitespace", spacing },
+ { "text", text }, -- non words
+ { "comment", comment },
+ { "constant", constant },
+ { "callers", callers },
+ { "helper", helper },
+ { "command", command },
+ { "primitive", primitive },
+ { "ifprimitive", ifprimitive },
+ { "reserved", reserved },
+ { "csname", csname },
+ { "grouping", grouping },
+ { "special", special },
+ { "extra", extra },
+ { "invisible", invisible },
+ { "web", web.pattern },
+ { "rest", rest },
+ }
+
+else
+
+ lexer.report("not supporting web snippets in tex lexer")
+
+ contextlexer._rules_web = {
+ { "whitespace", spacing },
+ { "text", text }, -- non words
+ { "comment", comment },
+ { "constant", constant },
+ { "callers", callers },
+ { "helper", helper },
+ { "command", command },
+ { "primitive", primitive },
+ { "ifprimitive", ifprimitive },
+ { "reserved", reserved },
+ { "csname", csname },
+ { "grouping", grouping },
+ { "special", special },
+ { "extra", extra },
+ { "invisible", invisible },
+ { "rest", rest },
+ }
--- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { cldlexer._NAME..'_whitespace', lexer.style_whitespace }
--- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { mpslexer._NAME..'_whitespace', lexer.style_whitespace }
+end
+
+contextlexer._tokenstyles = context.styleset
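The web-snippet support is optional: loadluafile returns the module or a false value instead of raising an error, so the lexer degrades gracefully when the snippets file is absent. The same effect in plain Lua would look roughly like this (a pcall-based stand-in for context's loadluafile):

    local ok, web = pcall(require,"scite-context-lexer-web-snippets")
    if ok and web then
        -- install the rule set that includes web.pattern
    else
        -- fall back to the plain rule set
    end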
local environment = {
["\\start"] = 1, ["\\stop"] = -1,
@@ -493,4 +542,6 @@ contextlexer._foldsymbols = { -- these need to be style references
["grouping"] = group,
}
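Folding is driven by counting: each \start... opens a fold level and each \stop... closes one, per the environment table above. A toy illustration of the counting scheme:

    local environment = { ["\\start"] = 1, ["\\stop"] = -1 }
    local level = 0
    for _, word in ipairs { "\\start", "\\start", "\\stop", "\\stop" } do
        level = level + environment[word]
    end
    print(level)  --> 0, balanced document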
+-- context.inspect(contextlexer)
+
return contextlexer