path: root/lexlua/pike.lua
author     mitchell <unknown>  2020-04-25 16:26:31 -0400
committer  mitchell <unknown>  2020-04-25 16:26:31 -0400
commit     fad15f79b1230b3076be515d6894c8919562809b (patch)
tree       72c848ef02c3331de5ca54eff7adaea3a9a6fb88 /lexlua/pike.lua
parent     1fd02a367dec125c0b49dd9246a0928433866b96 (diff)
download   scintilla-mirror-fad15f79b1230b3076be515d6894c8919562809b.tar.gz
Reformatted Lua LPeg lexers and added new convenience functions and a new pattern.
`lexer.range()` replaces `lexer.delimited_range()` and `lexer.nested_pair()`. `lexer.to_eol()` replaces `patt * lexer.nonnewline^0` constructs. `lexer.number` replaces `lexer.float + lexer.integer`. Also added unit tests for lexer functions.
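
For reference, a minimal standalone sketch of the new convenience functions, assuming the usual Lua LPeg lexer boilerplate; the 'example' lexer name and the choice of rules are illustrative only, and only the lexer.range(), lexer.to_eol(), and lexer.number calls mirror the change in the patch below.

    -- Sketch of the new convenience functions in a hypothetical 'example' lexer.
    local lexer = require('lexer')
    local token = lexer.token
    local lex = lexer.new('example')

    -- lexer.range() replaces lexer.delimited_range() and lexer.nested_pair().
    local dq_str = lexer.range('"', true) -- single-line, double-quoted string
    local block_comment = lexer.range('/*', '*/', false, false, true) -- balanced /* */
    lex:add_rule('string', token(lexer.STRING, dq_str))

    -- lexer.to_eol() replaces patt * lexer.nonnewline^0 constructs.
    local line_comment = lexer.to_eol('//', true) -- line comment, escapes allowed
    lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))

    -- lexer.number replaces lexer.float + lexer.integer.
    lex:add_rule('number', token(lexer.NUMBER, lexer.number))

    return lex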
Diffstat (limited to 'lexlua/pike.lua')
-rw-r--r--  lexlua/pike.lua  18
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/lexlua/pike.lua b/lexlua/pike.lua
index c17d1b4b4..3dff044ac 100644
--- a/lexlua/pike.lua
+++ b/lexlua/pike.lua
@@ -29,21 +29,21 @@ lex:add_rule('type', token(lexer.TYPE, word_match[[
lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
-- Strings.
-lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
- lexer.delimited_range('"', true) +
- '#' * lexer.delimited_range('"')))
+local sq_str = lexer.range("'", true)
+local dq_str = P('#')^-1 * lexer.range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
-- Comments.
-lex:add_rule('comment', token(lexer.COMMENT, '//' * lexer.nonnewline_esc^0 +
- lexer.nested_pair('/*', '*/')))
+local line_comment = lexer.to_eol('//', true)
+local block_comment = lexer.range('/*', '*/', false, false, true)
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
-- Numbers.
-lex:add_rule('number', token(lexer.NUMBER, (lexer.float + lexer.integer) *
- S('lLdDfF')^-1))
+lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('lLdDfF')^-1))
-- Preprocessors.
-lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, lexer.starts_line('#') *
- lexer.nonnewline^0))
+lex:add_rule('preprocessor', token(lexer.PREPROCESSOR,
+ lexer.to_eol(lexer.starts_line('#'))))
-- Operators.
lex:add_rule('operator', token(lexer.OPERATOR, S('<>=!+-/*%&|^~@`.,:;()[]{}')))