author     mitchell <unknown>    2020-04-25 16:26:31 -0400
committer  mitchell <unknown>    2020-04-25 16:26:31 -0400
commit     fad15f79b1230b3076be515d6894c8919562809b (patch)
tree       72c848ef02c3331de5ca54eff7adaea3a9a6fb88 /lexlua/scala.lua
parent     1fd02a367dec125c0b49dd9246a0928433866b96 (diff)
download   scintilla-mirror-fad15f79b1230b3076be515d6894c8919562809b.tar.gz
Reformatted Lua LPeg lexers and added new convenience functions and pattern.
`lexer.range()` replaces `lexer.delimited_range()` and `lexer.nested_pair()`.
`lexer.to_eol()` replaces `patt * lexer.nonnewline^0` constructs.
`lexer.number` replaces `lexer.float + lexer.integer`.
Also added unit tests for lexer functions.
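For readers unfamiliar with the Scintillua LPeg lexer API, here is a minimal sketch of how the new convenience patterns map onto the constructs they replace. It is assembled only from the replacements visible in this commit; the meaning of the boolean arguments (single-line strings, escaped newlines in line comments) is an assumption inferred from the old code they stand in for, not a documented signature.

local lexer = require('lexer')  -- Scintillua lexer module (sketch; setup may differ)
local lpeg = require('lpeg')
local S = lpeg.S

-- Old: lexer.delimited_range('"', true)
local dq_str = lexer.range('"', true)  -- assumed: true keeps the range on one line

-- Old: '"""' * (lexer.any - '"""')^0 * P('"""')^-1
local tq_str = lexer.range('"""')  -- same delimiter used for start and end

-- Old: '//' * lexer.nonnewline_esc^0
local line_comment = lexer.to_eol('//', true)  -- assumed: true allows escaped newlines

-- Old: '/*' * (lexer.any - '*/')^0 * P('*/')^-1
local block_comment = lexer.range('/*', '*/')

-- Old: lexer.float + lexer.integer
local number = lexer.number * S('LlFfDd')^-1

These patterns are then wrapped in token() and registered with lex:add_rule() exactly as shown in the diff below.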
Diffstat (limited to 'lexlua/scala.lua')
-rw-r--r--  lexlua/scala.lua  13
1 file changed, 6 insertions, 7 deletions
diff --git a/lexlua/scala.lua b/lexlua/scala.lua
index 38d328b54..f2959396f 100644
--- a/lexlua/scala.lua
+++ b/lexlua/scala.lua
@@ -13,7 +13,7 @@ lex:add_rule('whitespace', ws)
 
 -- Classes.
 lex:add_rule('class', token(lexer.KEYWORD, P('class')) * ws^1 *
-  token(lexer.CLASS, lexer.word))
+  token(lexer.CLASS, lexer.word))
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match[[
@@ -37,18 +37,17 @@ lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
 
 -- Strings.
 local symbol = "'" * lexer.word
-local dq_str = lexer.delimited_range('"', true)
-local tq_str = '"""' * (lexer.any - '"""')^0 * P('"""')^-1
+local dq_str = lexer.range('"', true)
+local tq_str = lexer.range('"""')
 lex:add_rule('string', token(lexer.STRING, tq_str + symbol + dq_str))
 
 -- Comments.
-local line_comment = '//' * lexer.nonnewline_esc^0
-local block_comment = '/*' * (lexer.any - '*/')^0 * P('*/')^-1
+local line_comment = lexer.to_eol('//', true)
+local block_comment = lexer.range('/*', '*/')
 lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
 
 -- Numbers.
-lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
-  S('LlFfDd')^-1))
+lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlFfDd')^-1))
 
 -- Operators.
 lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}')))