aboutsummaryrefslogtreecommitdiffhomepage
path: root/lexlua/fsharp.lua
diff options
context:
space:
mode:
authormitchell <unknown>2020-04-25 16:26:31 -0400
committermitchell <unknown>2020-04-25 16:26:31 -0400
commitfad15f79b1230b3076be515d6894c8919562809b (patch)
tree72c848ef02c3331de5ca54eff7adaea3a9a6fb88 /lexlua/fsharp.lua
parent1fd02a367dec125c0b49dd9246a0928433866b96 (diff)
downloadscintilla-mirror-fad15f79b1230b3076be515d6894c8919562809b.tar.gz
Reformatted Lua LPeg lexers and added new convenience functions and pattern.
`lexer.range()` replaces `lexer.delimited_range()` and `lexer.nested_pair()`. `lexer.to_eol()` replaces `patt * lexer.nonnewline^0` constructs. `lexer.number` replaces `lexer.float + lexer.integer`. Also added unit tests for lexer functions.
Diffstat (limited to 'lexlua/fsharp.lua')
-rw-r--r--lexlua/fsharp.lua18
1 file changed, 10 insertions, 8 deletions
diff --git a/lexlua/fsharp.lua b/lexlua/fsharp.lua
index d8ecdc628..b80bf37e4 100644
--- a/lexlua/fsharp.lua
+++ b/lexlua/fsharp.lua
@@ -34,26 +34,28 @@ lex:add_rule('type', token(lexer.TYPE, word_match[[
lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
-- Strings.
-lex:add_rule('string', token(lexer.STRING, lexer.delimited_range("'", true) +
- lexer.delimited_range('"', true)))
+local sq_str = lexer.range("'", true)
+local dq_str = lexer.range('"', true)
+lex:add_rule('string', token(lexer.STRING, sq_str + dq_str))
-- Comments.
-lex:add_rule('comment', token(lexer.COMMENT, '//' * lexer.nonnewline^0 +
- lexer.nested_pair('(*', '*)')))
+local line_comment = lexer.to_eol('//')
+local block_comment = lexer.range('(*', '*)', false, false, true)
+lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
-- Numbers.
-lex:add_rule('number', token(lexer.NUMBER, (lexer.float +
- lexer.integer * S('uUlL')^-1)))
+lex:add_rule('number', token(lexer.NUMBER,
+ (lexer.float + lexer.integer * S('uUlL')^-1)))
-- Preprocessor.
local preproc_word = word_match[[
else endif endregion if ifdef ifndef light region
]]
lex:add_rule('preproc', token(lexer.PREPROCESSOR, lexer.starts_line('#') *
- S('\t ')^0 * preproc_word))
+ S('\t ')^0 * preproc_word))
-- Operators.
lex:add_rule('operator', token(lexer.OPERATOR,
- S('=<>+-*/^.,:;~!@#%^&|?[](){}')))
+ S('=<>+-*/^.,:;~!@#%^&|?[](){}')))
return lex